KangLiao committed on
Commit
2226ff8
·
1 Parent(s): 42ca6a2
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -117,7 +117,7 @@ def camera_understanding(image_src, question, seed, progress=gr.Progress(track_t
117
  lat_img = fig_to_image(fig)
118
  plt.close(fig)
119
 
120
- return text, up_img, lat_img
121
 
122
 
123
  @torch.inference_mode()
@@ -214,8 +214,8 @@ with gr.Blocks(css=css) as demo:
214
  understanding_button = gr.Button("Chat")
215
  understanding_output = gr.Textbox(label="Response")
216
 
217
- camera1 = gr.Gallery(label="Camera Maps", columns=1, rows=1)
218
- camera2 = gr.Gallery(label="Camera Maps", columns=1, rows=1)
219
 
220
  with gr.Accordion("Advanced options", open=False):
221
  und_seed_input = gr.Number(label="Seed", precision=0, value=42)
@@ -242,7 +242,7 @@ with gr.Blocks(css=css) as demo:
242
  understanding_button.click(
243
  camera_understanding,
244
  inputs=[image_input, und_seed_input],
245
- outputs=[understanding_output, camera1, camera2]
246
  )
247
 
248
  demo.launch(share=True)
 
117
  lat_img = fig_to_image(fig)
118
  plt.close(fig)
119
 
120
+ return text#, up_img, lat_img
121
 
122
 
123
  @torch.inference_mode()
 
214
  understanding_button = gr.Button("Chat")
215
  understanding_output = gr.Textbox(label="Response")
216
 
217
+ #camera1 = gr.Gallery(label="Camera Maps", columns=1, rows=1)
218
+ #camera2 = gr.Gallery(label="Camera Maps", columns=1, rows=1)
219
 
220
  with gr.Accordion("Advanced options", open=False):
221
  und_seed_input = gr.Number(label="Seed", precision=0, value=42)
 
242
  understanding_button.click(
243
  camera_understanding,
244
  inputs=[image_input, und_seed_input],
245
+ outputs=[understanding_output]#, camera1, camera2]
246
  )
247
 
248
  demo.launch(share=True)