add support for mistral

Files changed:
- app.py (+54 -0)
- requirements.txt (+2 -1)

app.py CHANGED
```diff
@@ -6,6 +6,7 @@ import sambanova_gradio
 import xai_gradio
 import hyperbolic_gradio
 import perplexity_gradio
+import mistral_gradio
 
 
 
@@ -247,6 +248,59 @@ with gr.Blocks(fill_height=True) as demo:
 
         **Note:** This model is supported by Hyperbolic. You need to use a Hyperbolic API key from [Hyperbolic](https://app.hyperbolic.xyz/).
         """)
+    with gr.Tab("Mistral"):
+        with gr.Row():
+            mistral_model = gr.Dropdown(
+                choices=[
+                    # Premier Models
+                    'mistral-large-latest',       # Top-tier reasoning model (128k)
+                    'pixtral-large-latest',       # Frontier-class multimodal model (128k)
+                    'ministral-3b-latest',        # Best edge model (128k)
+                    'ministral-8b-latest',        # High performance edge model (128k)
+                    'mistral-small-latest',       # Enterprise-grade small model (32k)
+                    'codestral-latest',           # Code-specialized model (32k)
+                    'mistral-embed',              # Semantic text representation (8k)
+                    'mistral-moderation-latest',  # Content moderation service (8k)
+                    # Free Models
+                    'pixtral-12b-2409',           # Free 12B multimodal model (128k)
+                    'open-mistral-nemo',          # Multilingual model (128k)
+                    'open-codestral-mamba'        # Mamba-based coding model (256k)
+                ],
+                value='mistral-large-latest',  # Default to most capable model
+                label="Select Mistral Model",
+                interactive=True
+            )
+
+        mistral_interface = gr.load(
+            name=mistral_model.value,
+            src=mistral_gradio.registry,
+            accept_token=True,
+            fill_height=True
+        )
+
+        def update_mistral_model(new_model):
+            return gr.load(
+                name=new_model,
+                src=mistral_gradio.registry,
+                accept_token=True,
+                fill_height=True
+            )
+
+        mistral_model.change(
+            fn=update_mistral_model,
+            inputs=[mistral_model],
+            outputs=[mistral_interface]
+        )
+
+        gr.Markdown("""
+        **Note:** You need a Mistral API key to use these models. Get one at [Mistral AI Platform](https://console.mistral.ai/).
+
+        Models are grouped into two categories:
+        - **Premier Models**: Require a paid API key
+        - **Free Models**: Available with free API keys
+
+        Each model has different context window sizes (from 8k to 256k tokens) and specialized capabilities.
+        """)
 
 demo.launch(ssr_mode=False)
 
```
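For reference, the `gr.load(..., src=mistral_gradio.registry)` calls added here follow the same provider-registry pattern as the existing xai/hyperbolic/perplexity tabs. A minimal standalone sketch of that pattern, assuming (as the diff implies, but not verified against the mistral-gradio docs) that `mistral_gradio.registry` resolves a model name into a chat interface when passed to `gr.load`:

```python
# Minimal sketch of the provider-registry pattern used in the Mistral tab.
# Assumption: mistral_gradio exposes a `registry` compatible with gr.load,
# as implied by the diff above.
import gradio as gr
import mistral_gradio

with gr.Blocks(fill_height=True) as demo:
    gr.load(
        name="mistral-large-latest",   # any model id from the dropdown choices
        src=mistral_gradio.registry,   # registry resolves the name to a Mistral-backed UI
        accept_token=True,             # lets the user paste a Mistral API key in the UI
        fill_height=True,
    )

demo.launch(ssr_mode=False)
```

The dropdown's `change` handler in the diff rebuilds the interface the same way, returning a fresh `gr.load(...)` for whichever model the user selects.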
requirements.txt CHANGED

```diff
@@ -4,4 +4,5 @@ git+https://github.com/AK391/anthropic-gradio.git
 sambanova-gradio
 xai-gradio
 hyperbolic-gradio
-perplexity-gradio
+perplexity-gradio
+mistral-gradio
```
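After the updated requirements are installed, a quick sanity check (a sketch; `registry` is the only attribute app.py relies on) can confirm the new dependency resolves:

```python
# Sanity-check sketch: confirm the newly added mistral-gradio dependency
# imports and exposes the `registry` object that app.py passes to gr.load.
import mistral_gradio

assert hasattr(mistral_gradio, "registry"), "mistral-gradio is missing the registry app.py expects"
print("mistral-gradio import OK")
```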