Update app.py
app.py CHANGED
@@ -11,19 +11,26 @@ import os
 load_dotenv() # load environment variables from .env file
 api_key = os.getenv("OPENAI_API_KEY") # access the value of the OPENAI_API_KEY environment variable

-def …                                   [old lines 14-21; only "def" is rendered in this diff view]
+def predict(message, history):
+    prompt = "I'm an AI chatbot named ChatSherman designed by a student named ShermanAI at the Department of Electronic and Information Engineering at The Hong Kong Polytechnic University to help you with your engineering questions. Also, I can assist with a wide range of topics and questions."
+    history = [(prompt, '')] + history
+    history_openai_format = []
+    for human, assistant in history:
+        history_openai_format.append({"role": "user", "content": human})
+        history_openai_format.append({"role": "assistant", "content": assistant})
+    history_openai_format.append({"role": "user", "content": message})
+    response = openai.ChatCompletion.create(
+        model='gpt-3.5-turbo',
+        messages=history_openai_format,
+        temperature=1.0,
+        stream=True
+    )

-…                                       [old lines 23-26; not rendered in this diff view]
+    partial_message = ""
+    for chunk in response:
+        if len(chunk['choices'][0]['delta']) != 0:
+            partial_message = partial_message + chunk['choices'][0]['delta']['content']
+            yield partial_message

 title = "ChatSherman"
 description = "This is an AI chatbot powered by ShermanAI. Enter your question below to get started."

@@ -32,7 +39,4 @@ examples = [
     ["Is my personal information and data safe when I use the ChatSherman chatbot?", []],
     ["What are some common applications of deep learning in engineering?", []]
 ]
-
-outputs = ["chatbot", "state"]
-interface = gr.Interface(fn=chatbot, inputs=inputs, outputs=outputs, title=title, description=description, examples=examples)
-interface.launch(debug=True)
+gr.ChatInterface(predict, title=title, description=description, examples=examples).queue().launch(debug=True)
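The first hunk starts at line 11, so the imports and API-key wiring above it are not shown. For the new code to run, the top of app.py presumably looks roughly like this sketch (the exact preamble is an assumption, not part of the commit):

    # Assumed preamble for app.py -- a sketch, not the committed lines 1-10.
    import os
    import openai                   # pre-1.0 openai-python, matching openai.ChatCompletion.create below
    import gradio as gr             # provides gr.ChatInterface
    from dotenv import load_dotenv  # python-dotenv

    load_dotenv()                           # line 11 in the diff
    api_key = os.getenv("OPENAI_API_KEY")   # line 12 in the diff
    openai.api_key = api_key                # assumed: the key still has to be handed to the openai client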
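predict() rebuilds the whole conversation in the OpenAI chat format on every call. For a single prior exchange, the loop produces a message list like the following (illustrative values only); note that the ChatSherman persona prompt is sent as an ordinary user turn with an empty assistant reply, rather than as a "system" message, which would arguably be the more conventional choice:

    # Illustrative only: history as Gradio passes it, after predict() prepends (prompt, '').
    history = [
        ("I'm an AI chatbot named ChatSherman ...", ""),
        ("What is ChatSherman?", "ChatSherman is an AI chatbot powered by ShermanAI."),
    ]
    # After the for-loop and the final append, history_openai_format is:
    # [
    #     {"role": "user",      "content": "I'm an AI chatbot named ChatSherman ..."},
    #     {"role": "assistant", "content": ""},
    #     {"role": "user",      "content": "What is ChatSherman?"},
    #     {"role": "assistant", "content": "ChatSherman is an AI chatbot powered by ShermanAI."},
    #     {"role": "user",      "content": "<the new message>"},
    # ]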
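Because predict() is a generator that yields an ever-longer partial_message, gr.ChatInterface streams the reply into the chat window as chunks arrive; the .queue() call is what enables generator-based streaming in the Gradio 3.x line this Space appears to use. The generator can also be exercised without the UI, for example with a quick script like this sketch (assumes a valid OPENAI_API_KEY in .env):

    # Hypothetical smoke test for predict(); not part of the commit.
    printed = ""
    for partial in predict("What are some common applications of deep learning in engineering?", []):
        print(partial[len(printed):], end="", flush=True)   # print only the newly streamed characters
        printed = partial
    print()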
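One compatibility note: openai.ChatCompletion.create and the chunk['choices'][0]['delta'] access belong to the pre-1.0 openai-python interface, which was removed in openai 1.0, so the Space presumably pins openai<1.0 in requirements.txt. If it were moved to the 1.x client, the same streaming call would look roughly like this sketch (stream_chat is a hypothetical helper, not part of the commit):

    # Sketch of the equivalent streaming call on openai-python >= 1.0.
    from openai import OpenAI

    client = OpenAI()   # reads OPENAI_API_KEY from the environment by default

    def stream_chat(history_openai_format):
        """Yield the growing assistant reply, mirroring the loop in predict()."""
        stream = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=history_openai_format,
            temperature=1.0,
            stream=True,
        )
        partial_message = ""
        for chunk in stream:
            delta = chunk.choices[0].delta.content   # None when a chunk carries no text
            if delta:
                partial_message += delta
                yield partial_message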