Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
|
@@ -2,12 +2,13 @@ import discord
|
|
| 2 |
import logging
|
| 3 |
import os
|
| 4 |
import requests
|
| 5 |
-
from huggingface_hub import InferenceClient
|
| 6 |
from transformers import pipeline
|
| 7 |
import asyncio
|
| 8 |
import subprocess
|
| 9 |
import re
|
| 10 |
import urllib.parse
|
|
|
|
| 11 |
|
| 12 |
# 로깅 설정 (logging configuration)
|
| 13 |
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s:%(levelname)s:%(name)s:%(message)s', handlers=[logging.StreamHandler()])
|
|
@@ -86,7 +87,7 @@ class MyClient(discord.Client):
|
|
| 86 |
cohere_result = ''.join([part.choices[0].delta.content for part in cohere_response if part.choices and part.choices[0].delta and part.choices[0].delta.content])
|
| 87 |
|
| 88 |
combined_response = f"์ํ ์ ์๋ ๋ต๋ณ: {cohere_result}"
|
| 89 |
-
except
|
| 90 |
logging.error(f"Hugging Face API error: {e}")
|
| 91 |
combined_response = "An error occurred while processing the request."
|
| 92 |
|
|
@@ -113,7 +114,7 @@ class MyClient(discord.Client):
|
|
| 113 |
messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85))
|
| 114 |
full_response = ''.join([part.choices[0].delta.content for part in response if part.choices and part.choices[0].delta and part.choices[0].delta.content])
|
| 115 |
conversation_history.append({"role": "assistant", "content": full_response})
|
| 116 |
-
except
|
| 117 |
logging.error(f"Hugging Face API error: {e}")
|
| 118 |
full_response = "An error occurred while generating the response."
|
| 119 |
|
|
|
|
| 2 |
import logging
|
| 3 |
import os
|
| 4 |
import requests
|
| 5 |
+
from huggingface_hub import InferenceClient
|
| 6 |
from transformers import pipeline
|
| 7 |
import asyncio
|
| 8 |
import subprocess
|
| 9 |
import re
|
| 10 |
import urllib.parse
|
| 11 |
+
from requests.exceptions import HTTPError
|
| 12 |
|
| 13 |
# 로깅 설정 (logging configuration)
|
| 14 |
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s:%(levelname)s:%(name)s:%(message)s', handlers=[logging.StreamHandler()])
|
|
|
|
| 87 |
cohere_result = ''.join([part.choices[0].delta.content for part in cohere_response if part.choices and part.choices[0].delta and part.choices[0].delta.content])
|
| 88 |
|
| 89 |
combined_response = f"์ํ ์ ์๋ ๋ต๋ณ: {cohere_result}"
|
| 90 |
+
except HTTPError as e:
|
| 91 |
logging.error(f"Hugging Face API error: {e}")
|
| 92 |
combined_response = "An error occurred while processing the request."
|
| 93 |
|
|
|
|
| 114 |
messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85))
|
| 115 |
full_response = ''.join([part.choices[0].delta.content for part in response if part.choices and part.choices[0].delta and part.choices[0].delta.content])
|
| 116 |
conversation_history.append({"role": "assistant", "content": full_response})
|
| 117 |
+
except HTTPError as e:
|
| 118 |
logging.error(f"Hugging Face API error: {e}")
|
| 119 |
full_response = "An error occurred while generating the response."
|
| 120 |
|