Spaces:
Sleeping
Sleeping
layerdiffusion
committed on
Commit
·
a9cb4cf
1
Parent(s):
0bc71e0
- app.py +11 -26
- chat_interface.py +5 -1
app.py
CHANGED
|
@@ -1,18 +1,3 @@
|
|
| 1 |
-
# import gradio as gr
|
| 2 |
-
#
|
| 3 |
-
# import torch
|
| 4 |
-
#
|
| 5 |
-
# zero = torch.Tensor([0]).cuda()
|
| 6 |
-
# print(zero.device) # <-- 'cpu' 🤔
|
| 7 |
-
#
|
| 8 |
-
# @spaces.GPU
|
| 9 |
-
# def greet(n):
|
| 10 |
-
# print(zero.device) # <-- 'cuda:0' 🤗
|
| 11 |
-
# return f"Hello {zero + n} Tensor"
|
| 12 |
-
#
|
| 13 |
-
# demo = gr.Interface(fn=greet, inputs=gr.Number(), outputs=gr.Text())
|
| 14 |
-
# demo.launch()
|
| 15 |
-
|
| 16 |
import os
|
| 17 |
import spaces
|
| 18 |
|
|
@@ -174,15 +159,15 @@ def post_chat(history):
|
|
| 174 |
canvas_outputs = None
|
| 175 |
|
| 176 |
try:
|
| 177 |
-
|
| 178 |
-
|
| 179 |
-
|
| 180 |
-
|
|
|
|
| 181 |
except Exception as e:
|
| 182 |
print('Last assistant response is not valid canvas:', e)
|
| 183 |
|
| 184 |
-
|
| 185 |
-
return canvas_outputs, gr.update(visible=canvas_outputs is not None)
|
| 186 |
|
| 187 |
|
| 188 |
@spaces.GPU
|
|
@@ -290,9 +275,9 @@ with gr.Blocks(
|
|
| 290 |
with gr.Row(elem_classes='outer_parent'):
|
| 291 |
with gr.Column(scale=25):
|
| 292 |
with gr.Row():
|
| 293 |
-
|
| 294 |
-
|
| 295 |
-
|
| 296 |
|
| 297 |
seed = gr.Number(label="Random Seed", value=123456, precision=0)
|
| 298 |
|
|
@@ -358,7 +343,7 @@ with gr.Blocks(
|
|
| 358 |
chatInterface = ChatInterface(
|
| 359 |
fn=chat_fn,
|
| 360 |
post_fn=post_chat,
|
| 361 |
-
post_fn_kwargs=dict(inputs=[chatbot], outputs=[canvas_state, render_button]),
|
| 362 |
pre_fn=lambda: gr.update(visible=False),
|
| 363 |
pre_fn_kwargs=dict(outputs=[render_button]),
|
| 364 |
chatbot=chatbot,
|
|
@@ -380,4 +365,4 @@ with gr.Blocks(
|
|
| 380 |
], outputs=[chatInterface.chatbot_state])
|
| 381 |
|
| 382 |
if __name__ == "__main__":
|
| 383 |
-
demo.queue().launch(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import os
|
| 2 |
import spaces
|
| 3 |
|
|
|
|
| 159 |
canvas_outputs = None
|
| 160 |
|
| 161 |
try:
|
| 162 |
+
if history:
|
| 163 |
+
history = [(user, assistant) for user, assistant in history if isinstance(user, str) and isinstance(assistant, str)]
|
| 164 |
+
last_assistant = history[-1][1] if len(history) > 0 else None
|
| 165 |
+
canvas = omost_canvas.Canvas.from_bot_response(last_assistant)
|
| 166 |
+
canvas_outputs = canvas.process()
|
| 167 |
except Exception as e:
|
| 168 |
print('Last assistant response is not valid canvas:', e)
|
| 169 |
|
| 170 |
+
return canvas_outputs, gr.update(visible=canvas_outputs is not None), gr.update(interactive=len(history) > 0)
|
|
|
|
| 171 |
|
| 172 |
|
| 173 |
@spaces.GPU
|
|
|
|
| 275 |
with gr.Row(elem_classes='outer_parent'):
|
| 276 |
with gr.Column(scale=25):
|
| 277 |
with gr.Row():
|
| 278 |
+
clear_btn = gr.Button("➕ New Chat", variant="secondary", size="sm", min_width=60)
|
| 279 |
+
retry_btn = gr.Button("Retry", variant="secondary", size="sm", min_width=60, visible=False)
|
| 280 |
+
undo_btn = gr.Button("✏️️ Edit Last Input", variant="secondary", size="sm", min_width=60, interactive=False)
|
| 281 |
|
| 282 |
seed = gr.Number(label="Random Seed", value=123456, precision=0)
|
| 283 |
|
|
|
|
| 343 |
chatInterface = ChatInterface(
|
| 344 |
fn=chat_fn,
|
| 345 |
post_fn=post_chat,
|
| 346 |
+
post_fn_kwargs=dict(inputs=[chatbot], outputs=[canvas_state, render_button, undo_btn]),
|
| 347 |
pre_fn=lambda: gr.update(visible=False),
|
| 348 |
pre_fn_kwargs=dict(outputs=[render_button]),
|
| 349 |
chatbot=chatbot,
|
|
|
|
| 365 |
], outputs=[chatInterface.chatbot_state])
|
| 366 |
|
| 367 |
if __name__ == "__main__":
|
| 368 |
+
demo.queue().launch()
|
chat_interface.py
CHANGED
|
@@ -624,5 +624,9 @@ class ChatInterface(Blocks):
|
|
| 624 |
)
|
| 625 |
history = history[:-remove_input]
|
| 626 |
else:
|
| 627 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 628 |
return history, message or "", history
|
|
|
|
| 624 |
)
|
| 625 |
history = history[:-remove_input]
|
| 626 |
else:
|
| 627 |
+
while history:
|
| 628 |
+
deleted_a, deleted_b = history[-1]
|
| 629 |
+
history = history[:-1]
|
| 630 |
+
if isinstance(deleted_a, str) and isinstance(deleted_b, str):
|
| 631 |
+
break
|
| 632 |
return history, message or "", history
|