Commit e35953d · 1 Parent: 9350a8c
test
app.py CHANGED
@@ -16,7 +16,6 @@ auth_token = os.environ.get("TOKEN_FROM_SECRET")
 import torch
 from transformers import AutoProcessor, AutoTokenizer
 from transformers import Qwen2ForCausalLM, Qwen2_5_VLForConditionalGeneration, TextIteratorStreamer
-from vllm import LLM, SamplingParams
 from qwen_vl_utils import process_vision_info
 from threading import Thread
 
@@ -108,7 +107,7 @@ def add_text(state, text, image, image_process_mode):
     logging.info(f"add_text. len: {len(text)}")
     if len(text) <= 0 or image is None:
         state.skip_next = True
-        return (state, state.to_gradio_chatbot_public(), ""
+        return (state, state.to_gradio_chatbot_public(), "") + (no_change_btn,) * 2
 
     # Deal with image inputs
     if image is not None:
@@ -140,6 +139,8 @@ def http_bot(state):
     pload = {"prompt": prompt, "images": f'List of {len(state.get_images())} images: {all_images}'}
     logging.info(f"==== request ====\n{pload}")
 
+    return
+
     # Construct prompt
     cap_msgs, qa_msgs = build_messages(all_images, prompt)
     cap_prompt = processor.apply_chat_template([cap_msgs], tokenize=False, add_generation_prompt=True)
@@ -220,7 +221,7 @@ def build_demo(embed_mode):
                 label="Preprocess for non-square image", visible=False)
 
             gr.Examples(examples=[
-                ["./examples/demo_example.png", "When the canister is momentarily stopped by the spring, by what distance $d$ is the spring compressed?"],
+                ["./examples/image-text/demo_example.png", "When the canister is momentarily stopped by the spring, by what distance $d$ is the spring compressed?"],
             ], inputs=[imagebox, textbox], label='Examples')
 
         with gr.Column(scale=8):
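Note on the import change in the first hunk: once the vllm import is removed, the remaining imports (AutoProcessor, Qwen2_5_VLForConditionalGeneration, TextIteratorStreamer, process_vision_info, Thread) point to generation running directly through transformers. The sketch below only illustrates that path and is not code from this Space; the checkpoint name, dtype, and generation settings are assumptions.

import torch
from threading import Thread
from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration, TextIteratorStreamer

# Assumption: the commit does not show which Qwen2.5-VL checkpoint the Space loads.
MODEL_ID = "Qwen/Qwen2.5-VL-7B-Instruct"

processor = AutoProcessor.from_pretrained(MODEL_ID)
model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    MODEL_ID, torch_dtype=torch.bfloat16, device_map="auto"
)

def stream_answer(messages):
    # Render the chat template, tokenize, and stream tokens from a background thread.
    # Image inputs would additionally be prepared with qwen_vl_utils.process_vision_info
    # and passed to the processor via its images argument.
    text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    inputs = processor(text=[text], return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(processor.tokenizer, skip_prompt=True, skip_special_tokens=True)
    Thread(target=model.generate, kwargs=dict(**inputs, streamer=streamer, max_new_tokens=512)).start()
    for chunk in streamer:
        yield chunk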
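Note on the corrected early return in add_text: the padding with (no_change_btn,) * 2 makes the handler return exactly one value per Gradio output component it is wired to. The toy sketch below shows that pattern only; the component wiring and the gr.update() sentinel are assumptions, not taken from this Space.

import gradio as gr

# Hypothetical sentinel: gr.update() with no arguments leaves a component unchanged.
no_change_btn = gr.update()

def add_text(history, text):
    # Always return one value per wired output: state, chatbot, textbox, and two buttons.
    if not text:
        return (history, history, "") + (no_change_btn,) * 2
    history = history + [(text, None)]
    return (history, history, "") + (no_change_btn,) * 2

with gr.Blocks() as demo:
    state = gr.State([])
    chatbot = gr.Chatbot()
    textbox = gr.Textbox()
    submit_btn = gr.Button("Send")
    clear_btn = gr.Button("Clear")
    textbox.submit(add_text, [state, textbox],
                   [state, chatbot, textbox, submit_btn, clear_btn])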