using proxy
App/Generate/utils/HuggingChat.py
CHANGED
@@ -1,45 +1,24 @@
-cookie_path_dir = "/srv/App/Generate/utils/cookies"
-sign.saveCookiesToDir(cookie_path_dir)

-# sign = login(email, None)
-cookies = sign.loadCookiesFromDir(cookie_path_dir)

-)
-# Create a ChatBot
-chatbot = hugchat.ChatBot(
-    cookies={
-        "hf-chat": "831aa60e-832c-4bc8-9609-feee1a63cabd",
-        "token": "kybodoshVPnQLMIerSDRJjgLMnsoldLxjyoYHTdTsNWGXmOmdiGmQqjIXHDbwulMYnRqpnHkhwTcebrfTMxwXcVjLOfcPShwAJGzWwDTXjnxewKGgHICpagCZhnotfKA",
-    },
-    system_prompt="You are a true creative master genius and a great story teller that keeps the viewer/listener engauged. Make sure that you narrate the sequence of events properly so that the listener can understand. Use smart/insiteful quotes from the book. Don't speak to the viewers just tell the story accurately. Each scene should carry one topic and if the narration is long add more image_prompts, by default a short naration should have 2 image_prompts",
-)
-
-model_index = 0
-models = chatbot.get_available_llm_models()
-print(chatbot.active_model)
-if not chatbot.active_model.name == "CohereForAI/c4ai-command-r-plus":
-    for model in models:
-        print(model.name, "switching..")
-        if model.name == "CohereForAI/c4ai-command-r-plus":
-            model_index = models.index(model)
-            chatbot.switch_llm(model_index)
-            break
-# print(chatbot.current_conversation.system_prompt)
+import aiohttp
+import json
+import re


+class Hugging:
+    def __init__(self):
+        self.url = "https://p01--hugging--legal-stuff--a96n-7tyr.code.run/"
+        self.headers = {
+            "Authorization": "Bearer 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",
+            "accept": "application/json",
+            "content-type": "application/json",
+        }

+    async def chat(self, prompt: str):
+        payload = {"prompt": prompt}

+        async with aiohttp.ClientSession() as session:
+            async with session.post(
+                self.url, headers=self.headers, data=json.dumps(payload)
+            ) as response:
+                result = await response.text()
+                return result
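The new client exposes a single async entry point, so a caller only needs an event loop and the class above. Below is a minimal usage sketch, assuming the module is importable under the path shown in this commit (App.Generate.utils.HuggingChat) and that the proxy endpoint returns the generated text as the response body; the prompt string and the asyncio driver are illustrative, not part of the change.

import asyncio

# Assumed import path, taken from the file location in this commit.
from App.Generate.utils.HuggingChat import Hugging


async def main():
    client = Hugging()
    # chat() POSTs {"prompt": ...} to the proxy URL and returns the raw response body as text.
    story = await client.chat("Narrate the opening scene of the book in two short scenes.")
    print(story)


if __name__ == "__main__":
    asyncio.run(main())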