Spaces: Running on Zero

Update app.py

app.py CHANGED
@@ -51,7 +51,6 @@ auth_token = os.environ.get("TOKEN_FROM_SECRET") or True
 tokenizer = AutoTokenizer.from_pretrained("vikhyatk/moondream2")
 moondream = AutoModelForCausalLM.from_pretrained(
     "vikhyatk/moondream-next",
-    revision="d197f98731eb17b21708da2f3c69e8d295e683ed",
     trust_remote_code=True,
     torch_dtype=torch.float16,
     device_map={"": "cuda"},
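For context, a minimal sketch of what the load looks like after this hunk, assuming the imports app.py already uses (torch, transformers). With no revision= argument, from_pretrained resolves the repository's default branch ("main") instead of the pinned commit removed here.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("vikhyatk/moondream2")
moondream = AutoModelForCausalLM.from_pretrained(
    "vikhyatk/moondream-next",
    trust_remote_code=True,     # the repo ships custom modeling code
    torch_dtype=torch.float16,  # load the weights in half precision
    device_map={"": "cuda"},    # place every module on the GPU
)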
@@ -89,8 +88,7 @@ def answer_question(img, prompt):
     yield buffer.strip(), "Thinking..."

     answer = queue.get()
-
-    yield answer["answer"], ""
+    yield answer["answer"], answer["thought"]


 @spaces.GPU(duration=10)
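A hedged, self-contained sketch of the streaming pattern this hunk touches: answer_question yields (partial_text, status) pairs while generating and, after this change, a final (answer, thought) pair instead of dropping the chain of thought. The worker thread and fake token list below are stand-ins, not the real inference code from app.py.

from queue import Queue
from threading import Thread
import time

def answer_question(img, prompt):
    # Toy stand-in: a worker "produces" the final result dict while partial
    # text streams to the UI. In app.py the dict comes from the model.
    queue: Queue = Queue()

    def worker():
        time.sleep(0.1)
        queue.put({"answer": f"Echo: {prompt}", "thought": "looked at the image, then answered"})

    Thread(target=worker, daemon=True).start()

    buffer = ""
    for token in ["partial ", "answer ", "text"]:  # stand-in for the token streamer
        buffer += token
        yield buffer.strip(), "Thinking..."

    answer = queue.get()
    # The second slot now carries the chain of thought instead of an empty string.
    yield answer["answer"], answer["thought"]

# Each yielded pair maps onto two output components (response text, thought text).
for text, thought in answer_question(None, "what is in this image?"):
    print(repr(text), "|", repr(thought))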
@@ -264,7 +262,7 @@ css = """
     font-size: 1.4rem !important;
 }

-.chain-of-thought
+.chain-of-thought {
     opacity: 0.7 !important;
 }

@@ -278,12 +276,12 @@ css = """
     opacity: 0.3;
 }

-body, body gradio-app {
-    background: none !important;
-}
 """

 with gr.Blocks(title="moondream vl (new)", css=css, js=js) as demo:
+    if IN_SPACES:
+        gr.HTML("<style>body, body gradio-app { background: none !important; }</style>")
+
     gr.Markdown(
         """
         # 🌔 moondream vl (new)
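The background override moves out of the global css string and is now injected only when running on Spaces. A minimal sketch of the idea follows; IN_SPACES is defined elsewhere in app.py, and deriving it from the SPACE_ID environment variable is an assumption, not something visible in this diff.

import os
import gradio as gr

IN_SPACES = os.environ.get("SPACE_ID") is not None  # assumption: detect the Spaces runtime

with gr.Blocks(title="moondream vl (new)") as demo:
    if IN_SPACES:
        # Injected at render time rather than kept in the global `css` string,
        # so local runs keep the default Gradio background.
        gr.HTML("<style>body, body gradio-app { background: none !important; }</style>")
    gr.Markdown("# 🌔 moondream vl (new)")

if __name__ == "__main__":
    demo.launch()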
@@ -343,7 +341,7 @@ with gr.Blocks(title="moondream vl (new)", css=css, js=js) as demo:
             gr.Markdown("Coming soon!")

         with gr.Column():
-            thought = gr.Markdown(elem_classes=["chain-of-thought"])
+            thought = gr.Markdown(elem_classes=["chain-of-thought"], line_breaks=True)
             output = gr.Markdown(label="Response", elem_classes=["output-text"])
             ann = gr.Image(visible=False)

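How the two-slot yields line up with these components is a reasonable guess rather than something shown in the diff; the sketch below stubs answer_question and wires it to the output and thought Markdown components. line_breaks=True makes single newlines in the streamed chain of thought render as line breaks instead of being collapsed by the Markdown renderer.

import gradio as gr

def answer_question(img, prompt):
    # Stub with the same (answer, thought) yield shape as app.py's generator.
    yield "Thinking about it...", "Thinking..."
    yield "A small dog on a skateboard.", "Step 1: find the subject.\nStep 2: describe it."

with gr.Blocks() as demo:
    img = gr.Image(type="pil")
    prompt = gr.Textbox(label="Question")
    submit = gr.Button("Submit")

    with gr.Column():
        thought = gr.Markdown(elem_classes=["chain-of-thought"], line_breaks=True)
        output = gr.Markdown(label="Response", elem_classes=["output-text"])
        ann = gr.Image(visible=False)

    # Output order matches the yield order (answer first, thought second); this
    # wiring is an assumption, since the real click handler is outside this diff.
    submit.click(answer_question, inputs=[img, prompt], outputs=[output, thought])

if __name__ == "__main__":
    demo.launch()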