Spaces:
Runtime error
Runtime error
Update app.py
Browse files
Enable chat toggle
app.py
CHANGED
|
@@ -211,7 +211,7 @@ class RavenDemo(gr.Blocks):
|
|
| 211 |
show_label=False,
|
| 212 |
autofocus=True,
|
| 213 |
)
|
| 214 |
-
|
| 215 |
raven_function_call = gr.Code(
|
| 216 |
label="🐦‍⬛ NexusRaven V2 13B zero-shot generated function call",
|
| 217 |
language="python",
|
|
@@ -267,7 +267,7 @@ class RavenDemo(gr.Blocks):
|
|
| 267 |
has_error = gr.State(False)
|
| 268 |
user_input.submit(
|
| 269 |
fn=self.on_submit,
|
| 270 |
-
inputs=[user_input],
|
| 271 |
outputs=[
|
| 272 |
user_input,
|
| 273 |
raven_function_call,
|
|
@@ -301,7 +301,7 @@ class RavenDemo(gr.Blocks):
|
|
| 301 |
outputs=gmaps_html,
|
| 302 |
)
|
| 303 |
|
| 304 |
-
def on_submit(self, query: str, request: gr.Request):
|
| 305 |
def get_returns():
|
| 306 |
return (
|
| 307 |
user_input,
|
|
@@ -420,7 +420,7 @@ class RavenDemo(gr.Blocks):
|
|
| 420 |
steps_accordion = gr.Accordion(open=False)
|
| 421 |
yield get_returns()
|
| 422 |
|
| 423 |
-
while True:
|
| 424 |
try:
|
| 425 |
summary_model_prompt = self.get_summary_model_prompt(results, query)
|
| 426 |
print(
|
|
|
|
| 211 |
show_label=False,
|
| 212 |
autofocus=True,
|
| 213 |
)
|
| 214 |
+
should_chat = gr.Checkbox(label="Enable Chat Summary", info="If set, summarizes the returned results.", value=True)
|
| 215 |
raven_function_call = gr.Code(
|
| 216 |
label="🐦‍⬛ NexusRaven V2 13B zero-shot generated function call",
|
| 217 |
language="python",
|
|
|
|
| 267 |
has_error = gr.State(False)
|
| 268 |
user_input.submit(
|
| 269 |
fn=self.on_submit,
|
| 270 |
+
inputs=[user_input, should_chat],
|
| 271 |
outputs=[
|
| 272 |
user_input,
|
| 273 |
raven_function_call,
|
|
|
|
| 301 |
outputs=gmaps_html,
|
| 302 |
)
|
| 303 |
|
| 304 |
+
def on_submit(self, query: str, should_chat : bool, request: gr.Request):
|
| 305 |
def get_returns():
|
| 306 |
return (
|
| 307 |
user_input,
|
|
|
|
| 420 |
steps_accordion = gr.Accordion(open=False)
|
| 421 |
yield get_returns()
|
| 422 |
|
| 423 |
+
while True and should_chat:
|
| 424 |
try:
|
| 425 |
summary_model_prompt = self.get_summary_model_prompt(results, query)
|
| 426 |
print(
|