Update app.py
app.py CHANGED
@@ -268,10 +268,16 @@ if user_prompt := st.chat_input():
     with st.chat_message("assistant", avatar="🧠"):
         with st.spinner("Math Mentor is thinking... 🤔"):
             try:
-
-
-
-
+                def convert_role_for_gemini(role):
+                    """Convert Streamlit chat roles to Gemini API roles"""
+                    if role == "assistant":
+                        return "model"
+                    return role  # "user" stays the same
+
+                chat_session = model.start_chat(history=[
+                    {'role': convert_role_for_gemini(msg['role']), 'parts': [msg['content']]}
+                    for msg in active_chat[:-1]
+                ])
                 response = chat_session.send_message(user_prompt)
                 ai_response_text = response.text
                 st.markdown(ai_response_text)
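For readers following the change, below is a minimal standalone sketch of what the added history conversion does, assuming active_chat holds Streamlit-style {'role', 'content'} message dicts (as the list comprehension in the diff implies) and model is a google.generativeai GenerativeModel. The sample messages and the model name are illustrative, not taken from the repo.

# Minimal sketch of the role mapping added in this commit.
# The sample history and the model name are hypothetical.

def convert_role_for_gemini(role):
    """Convert Streamlit chat roles to Gemini API roles"""
    if role == "assistant":
        return "model"  # Gemini chat history uses "model" where Streamlit uses "assistant"
    return role  # "user" stays the same

# Hypothetical chat history in the shape Streamlit apps usually keep in session state.
active_chat = [
    {"role": "user", "content": "What is 7 x 8?"},
    {"role": "assistant", "content": "7 x 8 = 56."},
    {"role": "user", "content": "And 56 / 4?"},  # newest prompt, sent separately
]

# Everything except the newest user prompt becomes prior history for Gemini.
history = [
    {"role": convert_role_for_gemini(msg["role"]), "parts": [msg["content"]]}
    for msg in active_chat[:-1]
]
print(history)
# [{'role': 'user', 'parts': ['What is 7 x 8?']},
#  {'role': 'model', 'parts': ['7 x 8 = 56.']}]

# With the real SDK, this history would then seed the chat session, mirroring the diff:
# import google.generativeai as genai
# model = genai.GenerativeModel("gemini-1.5-flash")  # assumed model name
# chat_session = model.start_chat(history=history)
# response = chat_session.send_message(active_chat[-1]["content"])

Slicing with active_chat[:-1] keeps the newest user prompt out of the seeded history, because that prompt is delivered through chat_session.send_message(user_prompt) on the line immediately after the added block; including it would make the model see the same question twice.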