add qwq

- app.py +0 -11
- app_huggingface.py +2 -2
- pyproject.toml +1 -1
- requirements.txt +1 -1
app.py CHANGED

@@ -1,5 +1,4 @@
 from app_huggingface import demo as demo_huggingface
-from app_openrouter import demo as demo_openrouter
 from utils import get_app
 import gradio as gr
 
@@ -14,22 +13,12 @@ gr.load(
     coder=True,
     provider="together"
 ).launch()""",
-    "OpenRouter Coder": """
-import gradio as gr
-import ai_gradio
-
-gr.load(
-    name='openrouter:anthropic/claude-3.7-sonnet',
-    src=ai_gradio.registry,
-    coder=True
-).launch()""",
-
     # Add similar snippets for other providers
 }
 
 # Create mapping of providers to their demos
 PROVIDERS = {
     "Hugging Face": demo_huggingface,
-    "OpenRouter Coder": demo_openrouter
 }
 
 # Modified get_app implementation
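For context, a minimal sketch (not the project's actual implementation) of how the pieces that remain in app.py after this change typically fit together: the per-provider demos are collected in PROVIDERS and exposed as tabs. The gr.TabbedInterface wiring below is an assumption; only demo_huggingface, get_app, and PROVIDERS appear in the diff itself.

# Hedged sketch of the post-change app.py structure; the TabbedInterface wiring is assumed.
import gradio as gr

from app_huggingface import demo as demo_huggingface

# Mapping of providers to their demos (the OpenRouter entry is removed by this commit)
PROVIDERS = {
    "Hugging Face": demo_huggingface,
}

# Assumed wiring: one tab per provider demo
demo = gr.TabbedInterface(
    interface_list=list(PROVIDERS.values()),
    tab_names=list(PROVIDERS.keys()),
)

if __name__ == "__main__":
    demo.launch()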
app_huggingface.py CHANGED

@@ -12,10 +12,10 @@ HUGGINGFACE_MODELS_DISPLAY = [k.replace("huggingface:", "") for k in HUGGINGFACE
 # Create and launch the interface using get_app utility
 demo = get_app(
     models=HUGGINGFACE_MODELS_FULL,  # Use the full names with prefix
-    default_model=HUGGINGFACE_MODELS_FULL[
+    default_model=HUGGINGFACE_MODELS_FULL[16],
     dropdown_label="Select Huggingface Model",
     choices=HUGGINGFACE_MODELS_DISPLAY,  # Display names without prefix
     fill_height=True,
     coder=True,
-    provider="
+    provider="fireworks-ai"
 )
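To make the new defaults concrete, here is a hedged sketch of what app_huggingface.py looks like after this change. The actual model list is not shown in the diff, so the entries below are hypothetical stand-ins; the keyword arguments to get_app are taken from the hunk above.

# Hedged reconstruction of app_huggingface.py; the model entries are placeholders.
from utils import get_app  # repo helper; keyword names inferred from the diff

# Hypothetical subset of the registry-style model names used by the Space.
# In the real file, index 16 is presumably the newly added QwQ entry
# (the commit is titled "add qwq").
HUGGINGFACE_MODELS_FULL = [
    "huggingface:meta-llama/Llama-3.3-70B-Instruct",
    # ... more entries in the real list ...
    "huggingface:Qwen/QwQ-32B-Preview",
]
HUGGINGFACE_MODELS_DISPLAY = [k.replace("huggingface:", "") for k in HUGGINGFACE_MODELS_FULL]

demo = get_app(
    models=HUGGINGFACE_MODELS_FULL,              # full names with prefix
    default_model=HUGGINGFACE_MODELS_FULL[-1],   # the diff hard-codes index 16
    dropdown_label="Select Huggingface Model",
    choices=HUGGINGFACE_MODELS_DISPLAY,          # display names without prefix
    fill_height=True,
    coder=True,
    provider="fireworks-ai",                     # new inference provider
)

if __name__ == "__main__":
    demo.launch()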
pyproject.toml CHANGED

@@ -38,7 +38,7 @@ dependencies = [
     "langchain>=0.3.14",
     "chromadb>=0.5.23",
     "openai>=1.55.0",
-    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen, openrouter, huggingface]>=0.2.
+    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen, openrouter, huggingface]>=0.2.53",
 ]
 
 [tool.uv.sources]
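The commit title suggests the bump to ai-gradio 0.2.53 is what brings in the new QwQ support; loading such a model goes through the same gr.load(..., src=ai_gradio.registry) pattern as the snippets in app.py above. The model id below is an assumption, since the exact registry key added in 0.2.53 is not shown in this diff.

# Hedged example of loading a QwQ-style model via ai_gradio's registry;
# the model name is assumed, the load pattern comes from the app.py snippets above.
import gradio as gr
import ai_gradio

gr.load(
    name="huggingface:Qwen/QwQ-32B-Preview",  # assumed id, not confirmed by the diff
    src=ai_gradio.registry,
    coder=True,
).launch()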
requirements.txt CHANGED

@@ -2,7 +2,7 @@
 # uv pip compile pyproject.toml -o requirements.txt
 accelerate==1.2.1
     # via ai-gradio
-ai-gradio==0.2.
+ai-gradio==0.2.53
     # via anychat (pyproject.toml)
 aiofiles==23.2.1
     # via gradio
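Once the environment is re-synced from this lock file, a quick way to confirm the runtime actually has the pinned release (importlib.metadata is in the standard library):

# Optional sanity check after reinstalling from requirements.txt
from importlib.metadata import version

print("ai-gradio:", version("ai-gradio"))  # expect 0.2.53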