Dataset schema (column: dtype, distinct values or observed range):

model_id: stringclasses, 120 values
model_name: stringclasses, 120 values
author: stringclasses, 50 values
created_at: stringdate, 2022-03-02 23:29:04 to 2025-10-25 18:04:25
downloads: int64, 19 to 11M
likes: int64, 5 to 12.8k
library: stringclasses, 5 values
tags: stringclasses, 107 values
trending_score: int64, 5 to 620
trending_rank: int64, 1 to 1k
architecture: stringclasses, 37 values
model_type: stringclasses, 37 values
num_parameters: float64, 268M to 65.7B
max_position_embeddings: float64, 2.05k to 10.2M
hidden_size: float64, 768 to 8.19k
num_attention_heads: float64, 12 to 128
num_hidden_layers: float64, 16 to 92
vocab_size: float64, 32k to 256k
primary_category: stringclasses, 4 values
secondary_categories: stringclasses, 28 values
task_types: stringclasses, 34 values
language_support: stringclasses, 19 values
use_cases: stringclasses, 38 values
performance_metrics: stringclasses, 2 values
a2ap_compatibility_score: float64, 40 to 96
merge_difficulty: stringclasses, 4 values
evolution_potential: float64, 0.4 to 0.96
analysis_timestamp: stringdate, 2025-10-29 02:19:34 to 2025-10-29 02:31:28
readme_summary: stringclasses, 92 values
special_features: stringclasses, 49 values
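The schema above maps directly onto a Hugging Face `datasets` split, so the analysis columns can be queried programmatically. Below is a minimal sketch, assuming the records are published as a dataset; the repository id `your-org/model-analysis` is a placeholder, and only the column names and the "Easy" merge_difficulty value are taken from the preview above.

```python
# Minimal sketch: load the dataset and keep high-compatibility, easy-to-merge models.
# "your-org/model-analysis" is a hypothetical repo id; substitute the real dataset path.
from datasets import load_dataset

ds = load_dataset("your-org/model-analysis", split="train")

# Column names follow the schema listed above; the `or 0` guards rows where the
# score might be missing.
easy_merges = ds.filter(
    lambda row: (row["a2ap_compatibility_score"] or 0) >= 80
    and row["merge_difficulty"] == "Easy"
)

for row in easy_merges:
    print(row["model_id"], row["num_parameters"], row["evolution_potential"])
```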
arcee-ai/AFM-4.5B-Base
AFM-4.5B-Base
arcee-ai
2025-07-29T15:01:10+00:00
10,468
30
transformers
['transformers', 'safetensors', 'arcee', 'text-generation', 'en', 'es', 'fr', 'de', 'it', 'pt', 'ru', 'ar', 'hi', 'ko', 'zh', 'arxiv:2410.06511', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
10
901
ArceeForCausalLM
arcee
3,158,845,440
65,536
2,560
20
36
128,004
Language Model
['RLHF']
['text-generation', 'code-generation', 'reasoning']
['English', 'Chinese', 'Korean', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Education']
{}
65
Medium
0.65
2025-10-29T02:30:22.047017
license: apache-2.0 language: - en - es - fr - de - it - pt - ru - ar - hi - ko - zh library_name: transformers extra_gated_prompt: Company name is optional, please put NA if you would prefer not to s...
[]
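In the preview, list- and dict-valued fields such as tags, task_types, and performance_metrics are rendered as Python literals. The sketch below turns one raw record into native Python values; the abridged record is taken from the arcee-ai/AFM-4.5B-Base row above, and the assumption that these fields arrive as literal strings (rather than native sequences) is worth verifying against the hosted dataset.

```python
# Sketch: parse the list/dict fields of one record, assuming they arrive as
# Python-literal strings (as rendered in the preview above).
import ast

record = {
    "model_id": "arcee-ai/AFM-4.5B-Base",
    "num_parameters": 3_158_845_440,
    "task_types": "['text-generation', 'code-generation', 'reasoning']",
    "performance_metrics": "{}",
}

def parse_literal_fields(row, fields=("task_types", "performance_metrics")):
    """Convert Python-literal strings into lists/dicts; leave other values untouched."""
    out = dict(row)
    for name in fields:
        value = out.get(name)
        if isinstance(value, str):
            out[name] = ast.literal_eval(value)
    return out

parsed = parse_literal_fields(record)
print(parsed["task_types"])                 # ['text-generation', 'code-generation', 'reasoning']
print(type(parsed["performance_metrics"]))  # <class 'dict'>
```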
cerebras/GLM-4.6-REAP-218B-A32B
GLM-4.6-REAP-218B-A32B
cerebras
2025-10-23T23:44:22+00:00
228
10
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'glm', 'MOE', 'pruning', 'compression', 'conversational', 'en', 'arxiv:2510.13999', 'base_model:zai-org/GLM-4.6-FP8', 'base_model:finetune:zai-org/GLM-4.6-FP8', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
10
902
Glm4MoeForCausalLM
glm4_moe
29,716,643,840
202,752
5,120
96
92
151,552
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Creative Writing']
{}
60
Hard
0.6
2025-10-29T02:30:22.545889
language: - en library_name: transformers tags: - glm - MOE - pruning - compression license: mit name: cerebras/GLM-4.6-REAP-218B-A32B-FP8 description: > This model was obtained by uniformly pruning 4...
['Function Calling', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
utter-project/EuroLLM-1.7B
EuroLLM-1.7B
utter-project
2024-08-06T09:33:16+00:00
13,168
89
transformers
['transformers', 'pytorch', 'llama', 'text-generation', 'en', 'de', 'es', 'fr', 'it', 'pt', 'pl', 'nl', 'tr', 'sv', 'cs', 'el', 'hu', 'ro', 'fi', 'uk', 'sl', 'sk', 'da', 'lt', 'lv', 'et', 'bg', 'no', 'ca', 'hr', 'ga', 'mt', 'gl', 'zh', 'ru', 'ko', 'ja', 'ar', 'hi', 'arxiv:2409.16235', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
9
903
LlamaForCausalLM
llama
1,470,103,552
4,096
2,048
16
24
128,000
Language Model
[]
['question-answering', 'translation', 'summarization', 'code-generation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['General Purpose']
{}
83
Medium
0.83
2025-10-29T02:30:22.769931
license: apache-2.0 language: - en - de - es - fr - it - pt - pl - nl - tr - sv - cs - el - hu - ro - fi - uk - sl - sk - da - lt - lv - et - bg - 'no' - ca - hr - ga - mt - gl - zh - ru - ko - ja - a...
['RAG Support', 'Fast Inference', 'Safety Aligned']
Qwen/Qwen3-4B-Thinking-2507
Qwen3-4B-Thinking-2507
Qwen
2025-08-05T11:02:27+00:00
299,052
437
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2505.09388', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
9
904
Qwen3ForCausalLM
qwen3
3,220,111,360
262,144
2,560
32
36
151,936
Language Model
[]
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Creative Writing']
{}
68
Medium
0.68
2025-10-29T02:30:23.012123
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-4B-Thinking-2507/blob/main/LICENSE pipeline_tag: text-generation <a href="https://chat.qwen.ai/" target="...
['Multi-turn', 'Safety Aligned']
google/gemma-3-270m
gemma-3-270m
google
2025-08-05T18:50:31+00:00
130,610
889
transformers
['transformers', 'safetensors', 'gemma3_text', 'text-generation', 'gemma3', 'gemma', 'google', 'arxiv:2503.19786', 'arxiv:1905.07830', 'arxiv:1905.10044', 'arxiv:1911.11641', 'arxiv:1705.03551', 'arxiv:1911.01547', 'arxiv:1907.10641', 'arxiv:2311.07911', 'arxiv:2311.12022', 'arxiv:2411.04368', 'arxiv:1904.09728', 'arxiv:1903.00161', 'arxiv:2009.03300', 'arxiv:2304.06364', 'arxiv:2103.03874', 'arxiv:2110.14168', 'arxiv:2108.07732', 'arxiv:2107.03374', 'arxiv:2403.07974', 'arxiv:2305.03111', 'arxiv:2405.04520', 'arxiv:2210.03057', 'arxiv:2106.03193', 'arxiv:1910.11856', 'arxiv:2502.12404', 'arxiv:2502.21228', 'arxiv:2404.16816', 'arxiv:2104.12756', 'arxiv:2311.16502', 'arxiv:2203.10244', 'arxiv:2404.12390', 'arxiv:1810.12440', 'arxiv:1908.02660', 'arxiv:2310.02255', 'arxiv:2312.11805', 'license:gemma', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
9
905
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:24.312118
No README available
[]
mistralai/Mistral-7B-Instruct-v0.2
Mistral-7B-Instruct-v0.2
mistralai
2023-12-11T13:18:44+00:00
2,592,561
2,994
transformers
['transformers', 'pytorch', 'safetensors', 'mistral', 'text-generation', 'finetuned', 'mistral-common', 'conversational', 'arxiv:2310.06825', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
8
906
MistralForCausalLM
mistral
6,573,522,944
32,768
4,096
32
32
32,000
Language Model
['Fine-tuned']
['text-generation', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Education']
{}
78
Medium
0.78
2025-10-29T02:30:24.546203
library_name: transformers license: apache-2.0 tags: - finetuned - mistral-common new_version: mistralai/Mistral-7B-Instruct-v0.3 inference: false widget: - messages: - role: user content: What is you...
['RAG Support', 'Long Context']
microsoft/Phi-4-multimodal-instruct
Phi-4-multimodal-instruct
microsoft
2025-02-24T22:33:32+00:00
541,437
1,519
transformers
['transformers', 'safetensors', 'phi4mm', 'text-generation', 'nlp', 'code', 'audio', 'automatic-speech-recognition', 'speech-summarization', 'speech-translation', 'visual-question-answering', 'phi-4-multimodal', 'phi', 'phi-4-mini', 'custom_code', 'multilingual', 'ar', 'zh', 'cs', 'da', 'nl', 'en', 'fi', 'fr', 'de', 'he', 'hu', 'it', 'ja', 'ko', 'no', 'pl', 'pt', 'ru', 'es', 'sv', 'th', 'tr', 'uk', 'arxiv:2503.01743', 'arxiv:2407.13833', 'license:mit', 'autotrain_compatible', 'region:us']
8
907
Phi4MMForCausalLM
phi4mm
4,238,475,264
131,072
3,072
24
32
200,064
Language Model
['Fine-tuned', 'RLHF', 'LoRA', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'translation', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education', 'Business', 'Healthcare', 'Legal']
{}
91
Easy
0.91
2025-10-29T02:30:24.744552
license: mit license_link: https://huggingface.co/microsoft/Phi-4-multimodal-instruct/resolve/main/LICENSE language: - multilingual - ar - zh - cs - da - nl - en - fi - fr - de - he - hu - it - ja - k...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
Qwen/Qwen3-1.7B
Qwen3-1.7B
Qwen
2025-04-27T03:41:05+00:00
1,217,922
300
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-1.7B-Base', 'base_model:finetune:Qwen/Qwen3-1.7B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
8
908
Qwen3ForCausalLM
qwen3
1,720,451,072
40,960
2,048
16
28
151,936
Language Model
[]
['text-generation', 'question-answering', 'translation', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
78
Medium
0.78
2025-10-29T02:30:24.948566
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-1.7B/blob/main/LICENSE pipeline_tag: text-generation base_model: - Qwen/Qwen3-1.7B-Base <a href="https://...
['Fast Inference', 'Multi-turn', 'Safety Aligned']
MiniMaxAI/MiniMax-M1-80k
MiniMax-M1-80k
MiniMaxAI
2025-06-13T08:21:14+00:00
214
682
transformers
['transformers', 'safetensors', 'minimax_m1', 'text-generation', 'vllm', 'conversational', 'custom_code', 'arxiv:2506.13585', 'license:apache-2.0', 'autotrain_compatible', 'region:us']
8
909
MiniMaxM1ForCausalLM
minimax_m1
37,467,979,776
10,240,000
6,144
64
80
200,064
Language Model
['RLHF', 'Merged']
['text-generation', 'question-answering', 'translation', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Education', 'Creative Writing', 'Business']
{}
66
Medium
0.66
2025-10-29T02:30:25.189902
pipeline_tag: text-generation license: apache-2.0 library_name: transformers tags: - vllm <div align="center"> <svg width="60%" height="auto" viewBox="0 0 144 48" fill="none" xmlns="http://www.w3.org/...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
Qwen/Qwen3-30B-A3B-Instruct-2507
Qwen3-30B-A3B-Instruct-2507
Qwen
2025-07-28T07:31:27+00:00
1,398,323
634
transformers
['transformers', 'safetensors', 'qwen3_moe', 'text-generation', 'conversational', 'arxiv:2402.17463', 'arxiv:2407.02490', 'arxiv:2501.15383', 'arxiv:2404.06654', 'arxiv:2505.09388', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
8
910
Qwen3MoeForCausalLM
qwen3_moe
2,727,084,032
262,144
2,048
32
48
151,936
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
96
Easy
0.96
2025-10-29T02:30:25.439591
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507/blob/main/LICENSE pipeline_tag: text-generation <a href="https://chat.qwen.ai/?mode...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Safety Aligned']
unsloth/Qwen3-Coder-30B-A3B-Instruct-GGUF
Qwen3-Coder-30B-A3B-Instruct-GGUF
unsloth
2025-07-31T10:27:38+00:00
126,817
298
transformers
['transformers', 'gguf', 'unsloth', 'qwen3', 'qwen', 'text-generation', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-Coder-30B-A3B-Instruct', 'base_model:quantized:Qwen/Qwen3-Coder-30B-A3B-Instruct', 'license:apache-2.0', 'endpoints_compatible', 'region:us', 'imatrix', 'conversational']
8
911
Unknown
unknown
null
null
null
null
null
null
Language Model
[]
['text-generation', 'question-answering', 'code-generation', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research']
{}
48
Hard
0.48
2025-10-29T02:30:28.754629
tags: - unsloth - qwen3 - qwen base_model: - Qwen/Qwen3-Coder-30B-A3B-Instruct library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-Coder-30B-A3B-Instruct/blo...
['Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
nvidia/NVIDIA-Nemotron-Nano-12B-v2
NVIDIA-Nemotron-Nano-12B-v2
nvidia
2025-08-21T01:31:50+00:00
83,318
111
transformers
['transformers', 'safetensors', 'nvidia', 'pytorch', 'text-generation', 'conversational', 'en', 'es', 'fr', 'de', 'it', 'ja', 'dataset:nvidia/Nemotron-Post-Training-Dataset-v1', 'dataset:nvidia/Nemotron-Post-Training-Dataset-v2', 'dataset:nvidia/Nemotron-Pretraining-Dataset-sample', 'dataset:nvidia/Nemotron-CC-v2', 'dataset:nvidia/Nemotron-CC-Math-v1', 'dataset:nvidia/Nemotron-Pretraining-SFT-v1', 'arxiv:2504.03624', 'arxiv:2508.14444', 'arxiv:2412.02595', 'base_model:nvidia/NVIDIA-Nemotron-Nano-12B-v2-Base', 'base_model:finetune:nvidia/NVIDIA-Nemotron-Nano-12B-v2-Base', 'license:other', 'endpoints_compatible', 'region:us']
8
912
NemotronHForCausalLM
nemotron_h
20,174,602,240
131,072
5,120
40
62
131,072
Language Model
['Fine-tuned', 'Quantized', 'Merged', 'Specialized']
['text-generation', 'question-answering', 'translation', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education', 'Business', 'Legal', 'Finance']
{}
61
Hard
0.61
2025-10-29T02:30:29.034006
license: other license_name: nvidia-open-model-license license_link: >- https://www.nvidia.com/en-us/agreements/enterprise-software/nvidia-open-model-license/ pipeline_tag: text-generation datasets: -...
['RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
deepseek-ai/DeepSeek-V3.1
DeepSeek-V3.1
deepseek-ai
2025-08-21T02:37:52+00:00
577,824
794
transformers
['transformers', 'safetensors', 'deepseek_v3', 'text-generation', 'conversational', 'custom_code', 'arxiv:2412.19437', 'base_model:deepseek-ai/DeepSeek-V3.1-Base', 'base_model:quantized:deepseek-ai/DeepSeek-V3.1-Base', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'fp8', 'region:us']
8
913
DeepseekV3ForCausalLM
deepseek_v3
38,537,003,008
163,840
7,168
128
61
129,280
Chat/Instruct
[]
['text-generation', 'question-answering', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Business']
{}
61
Hard
0.61
2025-10-29T02:30:29.230261
license: mit library_name: transformers base_model: - deepseek-ai/DeepSeek-V3.1-Base <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <!-- markdownlint-disable no-duplica...
['Function Calling', 'RAG Support', 'Long Context', 'Multi-turn', 'Safety Aligned']
Qwen/Qwen3-Next-80B-A3B-Instruct
Qwen3-Next-80B-A3B-Instruct
Qwen
2025-09-09T15:40:56+00:00
2,973,054
838
transformers
['transformers', 'safetensors', 'qwen3_next', 'text-generation', 'conversational', 'arxiv:2309.00071', 'arxiv:2404.06654', 'arxiv:2505.09388', 'arxiv:2501.15383', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
8
914
Qwen3NextForCausalLM
qwen3_next
2,727,084,032
262,144
2,048
16
48
151,936
Language Model
['Merged']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Creative Writing']
{}
86
Easy
0.86
2025-10-29T02:30:29.755724
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-Next-80B-A3B-Instruct/blob/main/LICENSE pipeline_tag: text-generation <a href="https://chat.qwen.ai/" tar...
['Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
unsloth/GLM-4.6-GGUF
GLM-4.6-GGUF
unsloth
2025-09-30T21:59:12+00:00
100,999
113
transformers
['transformers', 'gguf', 'unsloth', 'text-generation', 'en', 'zh', 'arxiv:2508.06471', 'base_model:zai-org/GLM-4.6', 'base_model:quantized:zai-org/GLM-4.6', 'license:mit', 'endpoints_compatible', 'region:us', 'imatrix', 'conversational']
8
915
Unknown
unknown
null
null
null
null
null
null
Language Model
[]
['text-generation', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Creative Writing']
{}
58
Hard
0.58
2025-10-29T02:30:31.040128
tags: - unsloth base_model: - zai-org/GLM-4.6 language: - en - zh library_name: transformers license: mit pipeline_tag: text-generation > [!NOTE] > Please use latest version of `llama.cpp`. This GGUF...
['Function Calling', 'Long Context']
TinyLlama/TinyLlama-1.1B-Chat-v1.0
TinyLlama-1.1B-Chat-v1.0
TinyLlama
2023-12-30T06:27:30+00:00
4,382,624
1,436
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'en', 'dataset:cerebras/SlimPajama-627B', 'dataset:bigcode/starcoderdata', 'dataset:HuggingFaceH4/ultrachat_200k', 'dataset:HuggingFaceH4/ultrafeedback_binarized', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
7
916
LlamaForCausalLM
llama
1,172,832,256
2,048
2,048
32
22
32,000
Language Model
['Fine-tuned']
['text-generation', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['General Purpose']
{}
86
Easy
0.86
2025-10-29T02:30:31.226866
license: apache-2.0 datasets: - cerebras/SlimPajama-627B - bigcode/starcoderdata - HuggingFaceH4/ultrachat_200k - HuggingFaceH4/ultrafeedback_binarized language: - en widget: - example_title: Fibonacc...
['Long Context', 'Multi-turn', 'Safety Aligned']
meta-llama/Llama-3.3-70B-Instruct
Llama-3.3-70B-Instruct
meta-llama
2024-11-26T16:08:47+00:00
801,225
2,545
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'conversational', 'en', 'fr', 'it', 'pt', 'hi', 'es', 'th', 'de', 'arxiv:2204.05149', 'base_model:meta-llama/Llama-3.1-70B', 'base_model:finetune:meta-llama/Llama-3.1-70B', 'license:llama3.3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
7
917
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:32.499518
No README available
[]
AlicanKiraz0/Cybersecurity-BaronLLM_Offensive_Security_LLM_Q6_K_GGUF
Cybersecurity-BaronLLM_Offensive_Security_LLM_Q6_K_GGUF
AlicanKiraz0
2025-01-21T03:41:56+00:00
692
107
transformers
['transformers', 'gguf', 'llama-cpp', 'gguf-my-repo', 'text-generation', 'en', 'base_model:meta-llama/Llama-3.1-8B-Instruct', 'base_model:quantized:meta-llama/Llama-3.1-8B-Instruct', 'license:mit', 'endpoints_compatible', 'region:us', 'conversational']
7
918
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:33.822071
No README available
[]
google/gemma-3-1b-it
gemma-3-1b-it
google
2025-03-10T12:09:00+00:00
4,106,973
676
transformers
['transformers', 'safetensors', 'gemma3_text', 'text-generation', 'conversational', 'arxiv:1905.07830', 'arxiv:1905.10044', 'arxiv:1911.11641', 'arxiv:1904.09728', 'arxiv:1705.03551', 'arxiv:1911.01547', 'arxiv:1907.10641', 'arxiv:1903.00161', 'arxiv:2009.03300', 'arxiv:2304.06364', 'arxiv:2103.03874', 'arxiv:2110.14168', 'arxiv:2311.12022', 'arxiv:2108.07732', 'arxiv:2107.03374', 'arxiv:2210.03057', 'arxiv:2106.03193', 'arxiv:1910.11856', 'arxiv:2502.12404', 'arxiv:2502.21228', 'arxiv:2404.16816', 'arxiv:2104.12756', 'arxiv:2311.16502', 'arxiv:2203.10244', 'arxiv:2404.12390', 'arxiv:1810.12440', 'arxiv:1908.02660', 'arxiv:2312.11805', 'base_model:google/gemma-3-1b-pt', 'base_model:finetune:google/gemma-3-1b-pt', 'license:gemma', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
7
919
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:35.139486
No README available
[]
nvidia/Nemotron-H-4B-Instruct-128K
Nemotron-H-4B-Instruct-128K
nvidia
2025-04-15T19:24:02+00:00
1,154
7
transformers
['transformers', 'safetensors', 'nvidia', 'pytorch', 'text-generation', 'conversational', 'en', 'arxiv:2504.11409', 'base_model:nvidia/Nemotron-H-4B-Base-8K', 'base_model:finetune:nvidia/Nemotron-H-4B-Base-8K', 'license:other', 'endpoints_compatible', 'region:us']
7
920
NemotronHForCausalLM
nemotron_h
6,291,456,000
131,072
3,072
32
52
131,072
Language Model
['Merged']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Business']
{'accuracy': 0.0}
86
Easy
0.86
2025-10-29T02:30:35.366830
library_name: transformers license_name: nvidia-internal-scientific-research-and-development-model-license license_link: >- https://www.nvidia.com/en-us/agreements/enterprise-software/nvidia-internal-...
['RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
zai-org/GLM-4.5
GLM-4.5
zai-org
2025-07-20T03:25:36+00:00
21,056
1,367
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'conversational', 'en', 'zh', 'arxiv:2508.06471', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
7
921
Glm4MoeForCausalLM
glm4_moe
29,716,643,840
131,072
5,120
96
92
151,552
Language Model
['LoRA']
['text-generation', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Business']
{}
58
Hard
0.58
2025-10-29T02:30:37.982999
language: - en - zh library_name: transformers license: mit pipeline_tag: text-generation <div align="center"> <img src=https://raw.githubusercontent.com/zai-org/GLM-4.5/refs/heads/main/resources/logo...
['Long Context', 'Fast Inference']
Qwen/Qwen3-30B-A3B-Thinking-2507
Qwen3-30B-A3B-Thinking-2507
Qwen
2025-07-29T11:05:11+00:00
107,349
307
transformers
['transformers', 'safetensors', 'qwen3_moe', 'text-generation', 'conversational', 'arxiv:2402.17463', 'arxiv:2407.02490', 'arxiv:2501.15383', 'arxiv:2404.06654', 'arxiv:2505.09388', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
7
922
Qwen3MoeForCausalLM
qwen3_moe
2,727,084,032
262,144
2,048
32
48
151,936
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Creative Writing']
{}
86
Easy
0.86
2025-10-29T02:30:38.254473
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-30B-A3B-Thinking-2507/blob/main/LICENSE pipeline_tag: text-generation <a href="https://chat.qwen.ai/" tar...
['RAG Support', 'Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
nvidia/NVIDIA-Nemotron-Nano-9B-v2
NVIDIA-Nemotron-Nano-9B-v2
nvidia
2025-08-12T22:43:32+00:00
235,449
415
transformers
['transformers', 'safetensors', 'nvidia', 'pytorch', 'text-generation', 'conversational', 'en', 'es', 'fr', 'de', 'it', 'ja', 'dataset:nvidia/Nemotron-Post-Training-Dataset-v1', 'dataset:nvidia/Nemotron-Post-Training-Dataset-v2', 'dataset:nvidia/Nemotron-Pretraining-Dataset-sample', 'dataset:nvidia/Nemotron-CC-v2', 'dataset:nvidia/Nemotron-CC-Math-v1', 'dataset:nvidia/Nemotron-Pretraining-SFT-v1', 'arxiv:2504.03624', 'arxiv:2508.14444', 'arxiv:2412.02595', 'base_model:nvidia/NVIDIA-Nemotron-Nano-12B-v2', 'base_model:finetune:nvidia/NVIDIA-Nemotron-Nano-12B-v2', 'license:other', 'endpoints_compatible', 'region:us']
7
923
NemotronHForCausalLM
nemotron_h
14,074,511,360
131,072
4,480
40
56
131,072
Language Model
['Quantized', 'Merged', 'Specialized']
['text-generation', 'question-answering', 'translation', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education', 'Business', 'Legal', 'Finance']
{}
71
Medium
0.71
2025-10-29T02:30:38.598387
license: other license_name: nvidia-open-model-license license_link: >- https://www.nvidia.com/en-us/agreements/enterprise-software/nvidia-open-model-license/ pipeline_tag: text-generation datasets: -...
['RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
moondream/moondream3-preview
moondream3-preview
moondream
2025-09-11T19:48:53+00:00
27,812
459
transformers
['transformers', 'safetensors', 'moondream3', 'text-generation', 'image-text-to-text', 'custom_code', 'doi:10.57967/hf/6761', 'license:other', 'autotrain_compatible', 'region:us']
7
924
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:39.870910
No README available
[]
FractalAIResearch/Fathom-Search-4B
Fathom-Search-4B
FractalAIResearch
2025-09-24T11:14:33+00:00
2,034
121
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2509.24107', 'base_model:Qwen/Qwen3-4B', 'base_model:finetune:Qwen/Qwen3-4B', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
7
925
Qwen3ForCausalLM
qwen3
3,220,111,360
40,960
2,560
32
36
151,936
Language Model
['RLHF', 'LoRA', 'Specialized']
['text-generation', 'question-answering', 'summarization', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education']
{}
88
Easy
0.88
2025-10-29T02:30:40.148453
base_model: - Qwen/Qwen3-4B library_name: transformers license: mit pipeline_tag: text-generation This model was presented in the paper [Fathom-DeepResearch: Unlocking Long Horizon Information Retriev...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
zai-org/GLM-4.6-FP8
GLM-4.6-FP8
zai-org
2025-09-29T07:52:20+00:00
557,146
73
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'conversational', 'en', 'zh', 'arxiv:2508.06471', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'compressed-tensors', 'region:us']
7
926
Glm4MoeForCausalLM
glm4_moe
29,716,643,840
202,752
5,120
96
92
151,552
Language Model
[]
['text-generation', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Arabic']
['Creative Writing']
{}
63
Hard
0.63
2025-10-29T02:30:40.409282
language: - en - zh library_name: transformers license: mit pipeline_tag: text-generation <div align="center"> <img src=https://raw.githubusercontent.com/zai-org/GLM-4.5/refs/heads/main/resources/logo...
['Function Calling', 'Long Context']
cerebras/Qwen3-Coder-REAP-246B-A35B-FP8
Qwen3-Coder-REAP-246B-A35B-FP8
cerebras
2025-10-14T06:57:34+00:00
335
15
transformers
['transformers', 'safetensors', 'qwen3_moe', 'text-generation', 'qwen-coder', 'MOE', 'pruning', 'compression', 'conversational', 'en', 'arxiv:2510.13999', 'base_model:Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8', 'base_model:quantized:Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'fp8', 'region:us']
7
927
Qwen3MoeForCausalLM
qwen3_moe
29,018,554,368
262,144
6,144
96
62
151,936
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Creative Writing']
{}
60
Hard
0.6
2025-10-29T02:30:40.614883
language: - en library_name: transformers tags: - qwen-coder - MOE - pruning - compression license: apache-2.0 name: cerebras/Qwen3-Coder-REAP-246B-A35B-FP8 description: > This model was obtained by u...
['Function Calling', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
cerebras/GLM-4.6-REAP-268B-A32B
GLM-4.6-REAP-268B-A32B
cerebras
2025-10-24T02:49:48+00:00
57
7
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'glm', 'MOE', 'pruning', 'compression', 'conversational', 'en', 'arxiv:2510.13999', 'base_model:zai-org/GLM-4.6', 'base_model:finetune:zai-org/GLM-4.6', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
7
928
Glm4MoeForCausalLM
glm4_moe
29,716,643,840
202,752
5,120
96
92
151,552
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Creative Writing']
{}
60
Hard
0.6
2025-10-29T02:30:40.909190
language: - en library_name: transformers tags: - glm - MOE - pruning - compression license: mit name: cerebras/GLM-4.6-REAP-268B-A32B description: > This model was obtained by uniformly pruning 25% o...
['Function Calling', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
google/gemma-2b-it
gemma-2b-it
google
2024-02-08T13:23:59+00:00
68,294
812
transformers
['transformers', 'safetensors', 'gguf', 'gemma', 'text-generation', 'conversational', 'arxiv:2312.11805', 'arxiv:2009.03300', 'arxiv:1905.07830', 'arxiv:1911.11641', 'arxiv:1904.09728', 'arxiv:1905.10044', 'arxiv:1907.10641', 'arxiv:1811.00937', 'arxiv:1809.02789', 'arxiv:1911.01547', 'arxiv:1705.03551', 'arxiv:2107.03374', 'arxiv:2108.07732', 'arxiv:2110.14168', 'arxiv:2304.06364', 'arxiv:2206.04615', 'arxiv:1804.06876', 'arxiv:2110.08193', 'arxiv:2009.11462', 'arxiv:2101.11718', 'arxiv:1804.09301', 'arxiv:2109.07958', 'arxiv:2203.09509', 'license:gemma', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
929
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:42.181680
No README available
[]
meta-llama/Meta-Llama-3-8B
Meta-Llama-3-8B
meta-llama
2024-04-17T09:35:16+00:00
1,578,691
6,353
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'en', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
930
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:43.449310
No README available
[]
deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct
DeepSeek-Coder-V2-Lite-Instruct
deepseek-ai
2024-06-14T06:23:33+00:00
278,216
493
transformers
['transformers', 'safetensors', 'deepseek_v2', 'text-generation', 'conversational', 'custom_code', 'arxiv:2401.06066', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
931
DeepseekV2ForCausalLM
deepseek_v2
1,568,669,696
163,840
2,048
16
27
102,400
Language Model
['Merged']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Business']
{}
78
Medium
0.78
2025-10-29T02:30:45.808983
license: other license_name: deepseek-license license_link: LICENSE <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <!-- markdownlint-disable no-duplicate-header --> <di...
['Long Context']
microsoft/Phi-4-mini-instruct
Phi-4-mini-instruct
microsoft
2025-02-19T01:00:58+00:00
230,826
618
transformers
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'multilingual', 'ar', 'zh', 'cs', 'da', 'nl', 'en', 'fi', 'fr', 'de', 'he', 'hu', 'it', 'ja', 'ko', 'no', 'pl', 'pt', 'ru', 'es', 'sv', 'th', 'tr', 'uk', 'arxiv:2503.01743', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
932
Phi3ForCausalLM
phi3
4,238,475,264
131,072
3,072
24
32
200,064
Language Model
['Specialized']
['text-generation', 'question-answering', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education', 'Business', 'Healthcare', 'Legal']
{}
86
Easy
0.86
2025-10-29T02:30:46.165385
language: - multilingual - ar - zh - cs - da - nl - en - fi - fr - de - he - hu - it - ja - ko - 'no' - pl - pt - ru - es - sv - th - tr - uk library_name: transformers license: mit license_link: http...
['Function Calling', 'RAG Support', 'Long Context', 'Multi-turn', 'Safety Aligned']
fdtn-ai/Foundation-Sec-8B
Foundation-Sec-8B
fdtn-ai
2025-04-26T17:20:37+00:00
9,175
259
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'security', 'en', 'arxiv:2504.21039', 'base_model:meta-llama/Llama-3.1-8B', 'base_model:finetune:meta-llama/Llama-3.1-8B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
933
LlamaForCausalLM
llama
6,968,311,808
131,072
4,096
32
32
128,384
Language Model
['Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization', 'reasoning']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Healthcare', 'Legal']
{}
78
Medium
0.78
2025-10-29T02:30:46.421312
base_model: - meta-llama/Llama-3.1-8B language: - en library_name: transformers license: apache-2.0 pipeline_tag: text-generation tags: - security Foundation-Sec-8B (Llama-3.1-FoundationAI-SecurityLLM...
['RAG Support', 'Fast Inference', 'Memory Efficient', 'Safety Aligned']
Qwen/Qwen3-4B
Qwen3-4B
Qwen
2025-04-27T03:41:29+00:00
1,310,379
425
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2309.00071', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-4B-Base', 'base_model:finetune:Qwen/Qwen3-4B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
934
Qwen3ForCausalLM
qwen3
3,220,111,360
40,960
2,560
32
36
151,936
Language Model
[]
['text-generation', 'question-answering', 'translation', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
81
Medium
0.81
2025-10-29T02:30:46.688150
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-4B/blob/main/LICENSE pipeline_tag: text-generation base_model: - Qwen/Qwen3-4B-Base <a href="https://chat...
['RAG Support', 'Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
Qwen/Qwen3-30B-A3B
Qwen3-30B-A3B
Qwen
2025-04-27T03:43:05+00:00
446,084
805
transformers
['transformers', 'safetensors', 'qwen3_moe', 'text-generation', 'conversational', 'arxiv:2309.00071', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-30B-A3B-Base', 'base_model:finetune:Qwen/Qwen3-30B-A3B-Base', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
6
935
Qwen3MoeForCausalLM
qwen3_moe
2,727,084,032
40,960
2,048
32
48
151,936
Language Model
[]
['text-generation', 'question-answering', 'translation', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
86
Easy
0.86
2025-10-29T02:30:47.052263
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-30B-A3B/blob/main/LICENSE pipeline_tag: text-generation base_model: - Qwen/Qwen3-30B-A3B-Base <a href="ht...
['RAG Support', 'Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
Goekdeniz-Guelmez/Josiefied-Qwen3-8B-abliterated-v1
Josiefied-Qwen3-8B-abliterated-v1
Goekdeniz-Guelmez
2025-04-29T19:15:21+00:00
1,065
165
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'chat', 'conversational', 'base_model:Qwen/Qwen3-8B', 'base_model:finetune:Qwen/Qwen3-8B', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
936
Qwen3ForCausalLM
qwen3
7,870,087,168
40,960
4,096
32
36
151,936
Language Model
['Fine-tuned']
['text-generation', 'question-answering', 'code-generation', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['General Purpose']
{}
81
Medium
0.81
2025-10-29T02:30:47.309241
tags: - chat base_model: Qwen/Qwen3-8B pipeline_tag: text-generation library_name: transformers ![Logo/JPG](josiefied.jpeg) The **JOSIEFIED** model family represents a series of highly advanced langua...
['Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
huihui-ai/Huihui-Qwen3-4B-Instruct-2507-abliterated
Huihui-Qwen3-4B-Instruct-2507-abliterated
huihui-ai
2025-08-07T12:05:04+00:00
1,254
31
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'abliterated', 'uncensored', 'conversational', 'base_model:Qwen/Qwen3-4B-Instruct-2507', 'base_model:finetune:Qwen/Qwen3-4B-Instruct-2507', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
937
Qwen3ForCausalLM
qwen3
3,220,111,360
262,144
2,560
32
36
151,936
Language Model
['Quantized', 'Specialized']
['text-generation', 'conversation']
['English', 'Korean', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Creative Writing', 'Business', 'Legal']
{}
73
Medium
0.73
2025-10-29T02:30:47.570025
license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-4B-Instruct-2507/blob/main/LICENSE base_model: - Qwen/Qwen3-4B-Instruct-2507 pipeline_tag: text-generation library_name: transformer...
['RAG Support', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
meituan-longcat/LongCat-Flash-Chat
LongCat-Flash-Chat
meituan-longcat
2025-08-29T07:39:50+00:00
22,527
496
LongCat-Flash-Chat
['LongCat-Flash-Chat', 'safetensors', 'text-generation', 'transformers', 'conversational', 'custom_code', 'arxiv:2509.01322', 'license:mit', 'region:us']
6
938
LongcatFlashForCausalLM
unknown
null
131,072
6,144
64
null
131,072
Language Model
['Fine-tuned', 'Merged', 'Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Legal']
{}
81
Medium
0.81
2025-10-29T02:30:47.862574
license: mit library_name: LongCat-Flash-Chat pipeline_tag: text-generation tags: - transformers <div align="center"> <img src="https://raw.githubusercontent.com/meituan-longcat/LongCat-Flash-Chat/mai...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
nineninesix/kani-tts-370m
kani-tts-370m
nineninesix
2025-09-30T07:31:22+00:00
8,129
127
transformers
['transformers', 'safetensors', 'lfm2', 'text-generation', 'text-to-speech', 'en', 'de', 'ar', 'zh', 'es', 'ko', 'arxiv:2505.20506', 'base_model:nineninesix/kani-tts-450m-0.2-pt', 'base_model:finetune:nineninesix/kani-tts-450m-0.2-pt', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
6
939
Lfm2ForCausalLM
lfm2
283,798,528
128,000
1,024
16
16
80,539
Language Model
['Fine-tuned', 'Specialized']
['conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education', 'Legal']
{}
75
Medium
0.75
2025-10-29T02:30:48.115747
license: apache-2.0 language: - en - de - ar - zh - es - ko pipeline_tag: text-to-speech library_name: transformers base_model: - nineninesix/kani-tts-450m-0.2-pt <p> <img src="https://www.nineninesix...
['Fast Inference', 'Memory Efficient', 'Multi-turn']
KORMo-Team/KORMo-10B-sft
KORMo-10B-sft
KORMo-Team
2025-10-12T12:13:51+00:00
2,098
101
transformers
['transformers', 'safetensors', 'kormo', 'text-generation', 'conversational', 'custom_code', 'arxiv:2510.09426', 'license:apache-2.0', 'autotrain_compatible', 'region:us']
6
940
KORMoForCausalLM
kormo
8,565,817,344
131,072
4,096
32
40
125,184
Language Model
['LoRA']
['text-generation', 'question-answering', 'reasoning', 'conversation']
['English', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Healthcare']
{}
58
Hard
0.58
2025-10-29T02:30:48.368809
library_name: transformers license: apache-2.0 <!-- <p align="center"> <img src="https://github.com/MLP-Lab/KORMo-tutorial/blob/main/tutorial/attachment/kormo_logo.png?raw=true" style="width: 100%; ma...
['RAG Support', 'Safety Aligned']
Vortex5/Crimson-Twilight-12B
Crimson-Twilight-12B
Vortex5
2025-10-25T18:04:25+00:00
52
6
transformers
['transformers', 'safetensors', 'mistral', 'text-generation', 'mergekit', 'merge', 'roleplay', 'base_model:Vortex5/Abyssal-Seraph-12B', 'base_model:merge:Vortex5/Abyssal-Seraph-12B', 'base_model:Vortex5/Luminous-Shadow-12B', 'base_model:merge:Vortex5/Luminous-Shadow-12B', 'base_model:Vortex5/Lunar-Abyss-12B', 'base_model:merge:Vortex5/Lunar-Abyss-12B', 'base_model:Vortex5/Moonlit-Shadow-12B', 'base_model:merge:Vortex5/Moonlit-Shadow-12B', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
6
941
MistralForCausalLM
mistral
13,254,016,000
131,072
5,120
32
40
131,075
Code Generation
['Merged']
['summarization']
['English', 'Spanish', 'German', 'Arabic']
['Production']
{}
40
Critical
0.4
2025-10-29T02:30:51.076330
base_model: - Vortex5/Abyssal-Seraph-12B - Vortex5/Moonlit-Shadow-12B - Vortex5/Lunar-Abyss-12B - Vortex5/Luminous-Shadow-12B library_name: transformers tags: - mergekit - merge - roleplay <div style=...
[]
mistralai/Mistral-7B-v0.1
Mistral-7B-v0.1
mistralai
2023-09-20T13:03:50+00:00
471,244
3,995
transformers
['transformers', 'pytorch', 'safetensors', 'mistral', 'text-generation', 'pretrained', 'mistral-common', 'en', 'arxiv:2310.06825', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
5
942
MistralForCausalLM
mistral
6,573,522,944
32,768
4,096
32
32
32,000
Language Model
[]
['text-generation']
['English', 'Spanish', 'German', 'Arabic']
['General Purpose']
{}
70
Medium
0.7
2025-10-29T02:30:51.429411
library_name: transformers language: - en license: apache-2.0 tags: - pretrained - mistral-common inference: false extra_gated_description: >- If you want to learn more about how we process your perso...
[]
vikhyatk/moondream2
moondream2
vikhyatk
2024-03-04T18:03:06+00:00
1,617,138
1,322
transformers
['transformers', 'safetensors', 'moondream1', 'text-generation', 'image-text-to-text', 'custom_code', 'doi:10.57967/hf/6762', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
5
943
HfMoondream
moondream1
null
null
null
null
null
null
Language Model
['RLHF']
['text-generation', 'question-answering', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Education']
{}
45
Hard
0.45
2025-10-29T02:30:51.852846
license: apache-2.0 pipeline_tag: image-text-to-text new_version: moondream/moondream3-preview ⚠️ This repository contains the latest version of Moondream 2, our previous generation model. The latest ...
['Fast Inference']
microsoft/Phi-3-mini-4k-instruct
Phi-3-mini-4k-instruct
microsoft
2024-04-22T16:18:17+00:00
1,586,120
1,316
transformers
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'en', 'fr', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
944
Phi3ForCausalLM
phi3
3,722,379,264
4,096
3,072
32
32
32,064
Language Model
['Fine-tuned', 'Quantized', 'Specialized']
['text-generation', 'question-answering', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education', 'Business', 'Legal']
{}
81
Medium
0.81
2025-10-29T02:30:52.093479
license: mit license_link: https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/resolve/main/LICENSE language: - en - fr pipeline_tag: text-generation tags: - nlp - code inference: parameters: temp...
['RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
Sao10K/L3-8B-Stheno-v3.2
L3-8B-Stheno-v3.2
Sao10K
2024-06-05T10:30:57+00:00
2,003
349
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'en', 'dataset:Gryphe/Opus-WritingPrompts', 'dataset:Sao10K/Claude-3-Opus-Instruct-15K', 'dataset:Sao10K/Short-Storygen-v2', 'dataset:Sao10K/c2-Logs-Filtered', 'license:cc-by-nc-4.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
945
LlamaForCausalLM
llama
6,967,787,520
8,192
4,096
32
32
128,256
Language Model
['Merged']
['text-generation']
['English', 'Korean', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Creative Writing']
{}
75
Medium
0.75
2025-10-29T02:30:52.293128
license: cc-by-nc-4.0 language: - en datasets: - Gryphe/Opus-WritingPrompts - Sao10K/Claude-3-Opus-Instruct-15K - Sao10K/Short-Storygen-v2 - Sao10K/c2-Logs-Filtered *Just message me on discord if you ...
['Fast Inference', 'Multi-turn']
Qwen/Qwen2.5-0.5B-Instruct
Qwen2.5-0.5B-Instruct
Qwen
2024-09-16T11:52:46+00:00
1,546,378
382
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-0.5B', 'base_model:finetune:Qwen/Qwen2.5-0.5B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
946
Qwen2ForCausalLM
qwen2
367,345,664
32,768
896
14
24
151,936
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['General Purpose']
{}
91
Easy
0.91
2025-10-29T02:30:52.476059
license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen2.5-0.5B-Instruct/blob/main/LICENSE language: - en pipeline_tag: text-generation base_model: Qwen/Qwen2.5-0.5B tags: - chat library_na...
['Long Context', 'Fast Inference', 'Safety Aligned']
meta-llama/Llama-3.2-3B
Llama-3.2-3B
meta-llama
2024-09-18T15:23:48+00:00
272,782
648
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'en', 'de', 'fr', 'it', 'pt', 'hi', 'es', 'th', 'arxiv:2204.05149', 'arxiv:2405.16406', 'license:llama3.2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
947
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:30:53.712020
No README available
[]
Steelskull/L3.3-MS-Nevoria-70b
L3.3-MS-Nevoria-70b
Steelskull
2025-01-14T02:01:02+00:00
1,337
138
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'merge', 'conversational', 'base_model:EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.1', 'base_model:merge:EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.1', 'base_model:Sao10K/L3.3-70B-Euryale-v2.3', 'base_model:merge:Sao10K/L3.3-70B-Euryale-v2.3', 'base_model:SicariusSicariiStuff/Negative_LLAMA_70B', 'base_model:merge:SicariusSicariiStuff/Negative_LLAMA_70B', 'base_model:TheDrummer/Anubis-70B-v1', 'base_model:merge:TheDrummer/Anubis-70B-v1', 'base_model:nbeerbower/Llama-3.1-Nemotron-lorablated-70B', 'base_model:merge:nbeerbower/Llama-3.1-Nemotron-lorablated-70B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
948
LlamaForCausalLM
llama
65,475,182,592
131,072
8,192
64
80
128,256
Language Model
['Quantized', 'LoRA', 'Merged']
['question-answering', 'translation', 'summarization', 'code-generation', 'conversation']
['English', 'Korean', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Creative Writing', 'Legal']
{}
45
Hard
0.45
2025-10-29T02:30:55.544769
base_model: - Sao10K/L3.3-70B-Euryale-v2.3 - nbeerbower/Llama-3.1-Nemotron-lorablated-70B - EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.1 - SicariusSicariiStuff/Negative_LLAMA_70B - TheDrummer/Anubis-70B-v1 lib...
['RAG Support', 'Multi-turn']
deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B
DeepSeek-R1-Distill-Qwen-1.5B
deepseek-ai
2025-01-20T09:04:18+00:00
830,469
1,367
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
949
Qwen2ForCausalLM
qwen2
1,026,097,152
131,072
1,536
12
28
151,936
Language Model
['Fine-tuned', 'RLHF', 'Merged']
['text-generation', 'question-answering', 'summarization', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Business']
{}
83
Medium
0.83
2025-10-29T02:30:55.787771
license: mit library_name: transformers <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <!-- markdownlint-disable no-duplicate-header --> <div align="center"> <img src="...
['RAG Support', 'Long Context']
deepseek-ai/DeepSeek-R1-Distill-Qwen-32B
DeepSeek-R1-Distill-Qwen-32B
deepseek-ai
2025-01-20T09:19:00+00:00
2,413,381
1,462
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
950
Qwen2ForCausalLM
qwen2
20,911,226,880
131,072
5,120
40
64
152,064
Language Model
['Fine-tuned', 'RLHF', 'Merged']
['text-generation', 'question-answering', 'summarization', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Business']
{}
58
Hard
0.58
2025-10-29T02:30:56.021995
license: mit library_name: transformers <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <!-- markdownlint-disable no-duplicate-header --> <div align="center"> <img src="...
['RAG Support', 'Long Context']
nvidia/Nemotron-H-4B-Base-8K
Nemotron-H-4B-Base-8K
nvidia
2025-03-20T15:20:54+00:00
1,124
5
transformers
['transformers', 'safetensors', 'nvidia', 'pytorch', 'text-generation', 'en', 'arxiv:2504.11409', 'license:other', 'endpoints_compatible', 'region:us']
5
951
NemotronHForCausalLM
nemotron_h
6,291,456,000
8,192
3,072
32
52
131,072
Language Model
['Fine-tuned', 'RLHF', 'LoRA', 'Merged', 'Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Business', 'Legal', 'Finance']
{}
78
Medium
0.78
2025-10-29T02:30:58.348177
library_name: transformers license_name: nvidia-internal-scientific-research-and-development-model-license license_link: >- https://www.nvidia.com/en-us/agreements/enterprise-software/nvidia-internal-...
['RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
Qwen/Qwen3-14B
Qwen3-14B
Qwen
2025-04-27T03:42:45+00:00
654,239
297
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2309.00071', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-14B-Base', 'base_model:finetune:Qwen/Qwen3-14B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
952
Qwen3ForCausalLM
qwen3
13,360,824,320
40,960
5,120
40
40
151,936
Language Model
[]
['text-generation', 'question-answering', 'translation', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
71
Medium
0.71
2025-10-29T02:30:58.641118
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-14B/blob/main/LICENSE pipeline_tag: text-generation base_model: - Qwen/Qwen3-14B-Base <a href="https://ch...
['RAG Support', 'Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
Qwen/Qwen3-Reranker-8B
Qwen3-Reranker-8B
Qwen
2025-05-29T13:30:18+00:00
61,672
178
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'text-ranking', 'arxiv:2506.05176', 'base_model:Qwen/Qwen3-8B-Base', 'base_model:finetune:Qwen/Qwen3-8B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
953
Qwen3ForCausalLM
qwen3
7,868,993,536
40,960
4,096
32
36
151,669
Language Model
[]
['text-classification', 'code-generation', 'reasoning']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['General Purpose']
{}
76
Medium
0.76
2025-10-29T02:30:58.900555
license: apache-2.0 base_model: - Qwen/Qwen3-8B-Base library_name: transformers pipeline_tag: text-ranking <p align="center"> <img src="https://qianwen-res.oss-accelerate-overseas.aliyuncs.com/logo_qw...
['Long Context', 'Safety Aligned']
unsloth/DeepSeek-R1-0528-Qwen3-8B-GGUF
DeepSeek-R1-0528-Qwen3-8B-GGUF
unsloth
2025-05-29T14:17:25+00:00
112,160
336
transformers
['transformers', 'gguf', 'qwen3', 'text-generation', 'unsloth', 'deepseek', 'qwen', 'arxiv:2501.12948', 'base_model:deepseek-ai/DeepSeek-R1-0528-Qwen3-8B', 'base_model:quantized:deepseek-ai/DeepSeek-R1-0528-Qwen3-8B', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us', 'conversational']
5
954
Qwen3ForCausalLM
qwen3
7,870,087,168
131,072
4,096
32
36
151,936
Language Model
['RLHF', 'Merged']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Creative Writing', 'Business']
{}
83
Medium
0.83
2025-10-29T02:30:59.138277
tags: - unsloth - deepseek - qwen base_model: - deepseek-ai/DeepSeek-R1-0528-Qwen3-8B license: mit library_name: transformers <div> <p style="margin-bottom: 0; margin-top: 0;"> <strong>Learn how to ru...
['Function Calling', 'RAG Support', 'Long Context']
LiquidAI/LFM2-350M
LFM2-350M
LiquidAI
2025-07-10T12:01:24+00:00
25,083
163
transformers
['transformers', 'safetensors', 'lfm2', 'text-generation', 'liquid', 'edge', 'conversational', 'en', 'ar', 'zh', 'fr', 'de', 'ja', 'ko', 'es', 'license:other', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
5
955
Lfm2ForCausalLM
lfm2
268,435,456
128,000
1,024
16
16
65,536
Language Model
['LoRA']
['text-generation', 'question-answering', 'translation', 'code-generation', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Creative Writing', 'Healthcare']
{}
88
Easy
0.88
2025-10-29T02:30:59.333630
library_name: transformers license: other license_name: lfm1.0 license_link: LICENSE language: - en - ar - zh - fr - de - ja - ko - es pipeline_tag: text-generation tags: - liquid - lfm2 - edge <cente...
['Function Calling', 'RAG Support', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
google/gemma-3-270m-it
gemma-3-270m-it
google
2025-07-30T18:06:27+00:00
208,147
440
transformers
['transformers', 'safetensors', 'gemma3_text', 'text-generation', 'gemma3', 'gemma', 'google', 'conversational', 'arxiv:2503.19786', 'arxiv:1905.07830', 'arxiv:1905.10044', 'arxiv:1911.11641', 'arxiv:1705.03551', 'arxiv:1911.01547', 'arxiv:1907.10641', 'arxiv:2311.07911', 'arxiv:2311.12022', 'arxiv:2411.04368', 'arxiv:1904.09728', 'arxiv:1903.00161', 'arxiv:2009.03300', 'arxiv:2304.06364', 'arxiv:2103.03874', 'arxiv:2110.14168', 'arxiv:2108.07732', 'arxiv:2107.03374', 'arxiv:2403.07974', 'arxiv:2305.03111', 'arxiv:2405.04520', 'arxiv:2210.03057', 'arxiv:2106.03193', 'arxiv:1910.11856', 'arxiv:2502.12404', 'arxiv:2502.21228', 'arxiv:2404.16816', 'arxiv:2104.12756', 'arxiv:2311.16502', 'arxiv:2203.10244', 'arxiv:2404.12390', 'arxiv:1810.12440', 'arxiv:1908.02660', 'arxiv:2310.02255', 'arxiv:2312.11805', 'base_model:google/gemma-3-270m', 'base_model:finetune:google/gemma-3-270m', 'license:gemma', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
956
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:31:00.570483
No README available
[]
dousery/medical-reasoning-gpt-oss-20b
medical-reasoning-gpt-oss-20b
dousery
2025-08-09T01:43:56+00:00
2,219
43
transformers
['transformers', 'safetensors', 'gpt_oss', 'text-generation', 'medical', 'reasoning', 'healthcare', 'fine-tuned', 'gpt-oss', 'clinical', 'diagnosis', 'unsloth', 'conversational', 'en', 'dataset:Freedomintelligence/medical-o1-reasoning-SFT', 'base_model:unsloth/gpt-oss-20b', 'base_model:quantized:unsloth/gpt-oss-20b', 'license:apache-2.0', 'model-index', 'autotrain_compatible', 'endpoints_compatible', '4-bit', 'bitsandbytes', 'region:us']
5
957
GptOssForCausalLM
gpt_oss
2,967,920,640
131,072
2,880
64
24
201,088
Language Model
['Fine-tuned', 'LoRA', 'Specialized']
['text-generation', 'question-answering', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Business', 'Healthcare']
{}
78
Medium
0.78
2025-10-29T02:31:00.928490
language: - en license: apache-2.0 base_model: unsloth/gpt-oss-20b tags: - medical - reasoning - healthcare - fine-tuned - gpt-oss - clinical - diagnosis - unsloth datasets: - Freedomintelligence/medi...
['RAG Support', 'Fast Inference', 'Memory Efficient', 'Safety Aligned']
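The record above carries '4-bit' and 'bitsandbytes' among its tags, i.e. the checkpoint is distributed with a 4-bit quantization setup. A minimal loading sketch follows; the repo id is taken from the record, while the compute dtype, device map, and prompt are assumptions, and GPU memory requirements are not checked here.

```python
# Minimal sketch: load a 4-bit (bitsandbytes) checkpoint like the record above.
# Assumes a CUDA GPU plus `transformers`, `accelerate`, and `bitsandbytes`
# installed; the dtype/device_map choices and the prompt are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model_id = "dousery/medical-reasoning-gpt-oss-20b"

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,  # assumption; float16 is also common
)

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=bnb_config,
    device_map="auto",
)

prompt = "A 45-year-old patient presents with chest pain. List key differentials."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```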
baichuan-inc/Baichuan-M2-32B
Baichuan-M2-32B
baichuan-inc
2025-08-10T06:40:13+00:00
130,617
103
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'zh', 'arxiv:2509.02208', 'base_model:Qwen/Qwen2.5-32B', 'base_model:finetune:Qwen/Qwen2.5-32B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
5
958
Qwen2ForCausalLM
qwen2
20,911,226,880
131,072
5,120
40
64
152,064
Language Model
['RLHF', 'Quantized', 'Specialized']
['text-generation', 'question-answering', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Education', 'Creative Writing', 'Business', 'Healthcare']
{}
63
Hard
0.63
2025-10-29T02:31:01.280651
license: apache-2.0 tags: - chat library_name: transformers language: - en - zh base_model: - Qwen/Qwen2.5-32B [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensour...
['Long Context', 'Fast Inference']
swiss-ai/Apertus-8B-Instruct-2509
Apertus-8B-Instruct-2509
swiss-ai
2025-08-13T09:30:23+00:00
347,397
381
transformers
['transformers', 'safetensors', 'apertus', 'text-generation', 'multilingual', 'compliant', 'swiss-ai', 'conversational', 'arxiv:2509.14233', 'base_model:swiss-ai/Apertus-8B-2509', 'base_model:finetune:swiss-ai/Apertus-8B-2509', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
5
959
ApertusForCausalLM
apertus
6,979,321,856
65,536
4,096
32
32
131,072
Language Model
['Specialized']
['text-generation', 'question-answering', 'summarization', 'reasoning', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Legal']
{}
86
Easy
0.86
2025-10-29T02:31:01.523760
license: apache-2.0 base_model: - swiss-ai/Apertus-8B-2509 pipeline_tag: text-generation library_name: transformers tags: - multilingual - compliant - swiss-ai - apertus extra_gated_prompt: "### Apert...
['Function Calling', 'RAG Support', 'Long Context', 'Safety Aligned']
apple/FastVLM-0.5B
FastVLM-0.5B
apple
2025-08-25T17:04:42+00:00
13,612
338
ml-fastvlm
['ml-fastvlm', 'safetensors', 'llava_qwen2', 'text-generation', 'transformers', 'conversational', 'custom_code', 'arxiv:2412.13303', 'license:apple-amlr', 'region:us']
5
960
LlavaQwen2ForCausalLM
llava_qwen2
367,345,664
32,768
896
14
24
151,936
Language Model
[]
['text-generation', 'question-answering', 'code-generation', 'conversation']
['English', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['General Purpose']
{}
75
Medium
0.75
2025-10-29T02:31:03.265353
license: apple-amlr library_name: ml-fastvlm tags: - transformers FastVLM was introduced in **[FastVLM: Efficient Vision Encoding for Vision Language Models](https://www.arxiv.org/abs/2412.13303). (CV...
['RAG Support', 'Fast Inference']
MiniMaxAI/MiniMax-M2
MiniMax-M2
MiniMaxAI
2025-10-22T13:45:10+00:00
28,398
620
transformers
['transformers', 'safetensors', 'minimax', 'text-generation', 'conversational', 'arxiv:2504.07164', 'arxiv:2509.06501', 'arxiv:2509.13160', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'fp8', 'region:us']
620
961
MiniMaxM2ForCausalLM
minimax
7,635,861,504
196,608
3,072
48
62
200,064
Language Model
['LoRA']
['text-generation', 'question-answering', 'code-generation', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production']
{}
91
Easy
0.91
2025-10-29T02:31:05.690188
pipeline_tag: text-generation license: mit library_name: transformers <div align="center"> <svg width="60%" height="auto" viewBox="0 0 144 48" fill="none" xmlns="http://www.w3.org/2000/svg"> <path d="...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Safety Aligned']
PokeeAI/pokee_research_7b
pokee_research_7b
PokeeAI
2025-10-17T20:38:37+00:00
3,806
87
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'agent', 'deepresearch', 'llm', 'rl', 'reinforcementlearning', 'conversational', 'en', 'dataset:miromind-ai/MiroRL-GenQA', 'arxiv:2510.15862', 'base_model:Qwen/Qwen2.5-7B-Instruct', 'base_model:finetune:Qwen/Qwen2.5-7B-Instruct', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
87
962
Qwen2ForCausalLM
qwen2
4,860,936,192
32,768
3,584
28
28
152,064
Language Model
['Fine-tuned', 'RLHF', 'Merged', 'Specialized']
['text-generation', 'question-answering', 'summarization', 'reasoning']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Education', 'Healthcare', 'Legal', 'Finance']
{}
78
Medium
0.78
2025-10-29T02:31:05.933505
base_model: - Qwen/Qwen2.5-7B-Instruct datasets: - miromind-ai/MiroRL-GenQA language: - en license: apache-2.0 tags: - agent - deepresearch - llm - rl - reinforcementlearning pipeline_tag: text-genera...
['RAG Support', 'Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
zai-org/GLM-4.6
GLM-4.6
zai-org
2025-09-29T18:22:51+00:00
62,687
906
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'conversational', 'en', 'zh', 'arxiv:2508.06471', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
70
963
Glm4MoeForCausalLM
glm4_moe
29,716,643,840
202,752
5,120
96
92
151,552
Language Model
[]
['text-generation', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Arabic']
['Creative Writing']
{}
63
Hard
0.63
2025-10-29T02:31:06.094383
language: - en - zh library_name: transformers license: mit pipeline_tag: text-generation <div align="center"> <img src=https://raw.githubusercontent.com/zai-org/GLM-4.5/refs/heads/main/resources/logo...
['Function Calling', 'Long Context']
openai/gpt-oss-20b
gpt-oss-20b
openai
2025-08-04T22:33:29+00:00
4,842,116
3,816
transformers
['transformers', 'safetensors', 'gpt_oss', 'text-generation', 'vllm', 'conversational', 'arxiv:2508.10925', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', '8-bit', 'mxfp4', 'region:us']
62
964
GptOssForCausalLM
gpt_oss
2,967,920,640
131,072
2,880
64
24
201,088
Language Model
['Fine-tuned', 'Specialized']
['text-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Business']
{}
85
Easy
0.85
2025-10-29T02:31:06.320771
license: apache-2.0 pipeline_tag: text-generation library_name: transformers tags: - vllm <p align="center"> <img alt="gpt-oss-20b" src="https://raw.githubusercontent.com/openai/gpt-oss/main/docs/gpt-...
['Function Calling', 'Fast Inference', 'Multi-turn']
meta-llama/Llama-3.1-8B-Instruct
Llama-3.1-8B-Instruct
meta-llama
2024-07-18T08:56:00+00:00
5,224,593
4,835
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'conversational', 'en', 'de', 'fr', 'it', 'pt', 'hi', 'es', 'th', 'arxiv:2204.05149', 'base_model:meta-llama/Llama-3.1-8B', 'base_model:finetune:meta-llama/Llama-3.1-8B', 'license:llama3.1', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
48
965
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:31:07.524061
No README available
[]
deepseek-ai/DeepSeek-V3.2-Exp
DeepSeek-V3.2-Exp
deepseek-ai
2025-09-29T06:07:26+00:00
101,711
748
transformers
['transformers', 'safetensors', 'deepseek_v32', 'text-generation', 'conversational', 'base_model:deepseek-ai/DeepSeek-V3.2-Exp-Base', 'base_model:finetune:deepseek-ai/DeepSeek-V3.2-Exp-Base', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'fp8', 'region:us']
45
966
DeepseekV32ForCausalLM
deepseek_v32
38,537,003,008
163,840
7,168
128
61
129,280
Language Model
[]
['text-generation', 'question-answering', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research']
{}
58
Hard
0.58
2025-10-29T02:31:07.749509
license: mit library_name: transformers base_model: - deepseek-ai/DeepSeek-V3.2-Exp-Base base_model_relation: finetune <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <!...
['Function Calling', 'Fast Inference', 'Safety Aligned']
inclusionAI/LLaDA2.0-flash-preview
LLaDA2.0-flash-preview
inclusionAI
2025-10-25T09:22:29+00:00
44
44
transformers
['transformers', 'safetensors', 'llada2_moe', 'text-generation', 'dllm', 'diffusion', 'llm', 'text_generation', 'conversational', 'custom_code', 'license:apache-2.0', 'autotrain_compatible', 'region:us']
44
967
LLaDA2MoeModelLM
llada2_moe
7,086,276,608
16,384
4,096
32
32
157,184
Language Model
['Fine-tuned', 'RLHF']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Korean', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Education']
{}
65
Medium
0.65
2025-10-29T02:31:08.028102
license: apache-2.0 library_name: transformers tags: - dllm - diffusion - llm - text_generation **LLaDA2.0-flash-preview** is a diffusion language model featuring a 100BA6B Mixture-of-Experts (MoE) ar...
['Function Calling', 'RAG Support', 'Fast Inference', 'Memory Efficient']
openai/gpt-oss-120b
gpt-oss-120b
openai
2025-08-04T22:33:06+00:00
3,798,358
4,067
transformers
['transformers', 'safetensors', 'gpt_oss', 'text-generation', 'vllm', 'conversational', 'arxiv:2508.10925', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', '8-bit', 'mxfp4', 'region:us']
41
968
GptOssForCausalLM
gpt_oss
4,162,314,240
131,072
2,880
64
36
201,088
Language Model
['Fine-tuned', 'Specialized']
['text-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Business']
{}
80
Medium
0.8
2025-10-29T02:31:08.236012
license: apache-2.0 pipeline_tag: text-generation library_name: transformers tags: - vllm <p align="center"> <img alt="gpt-oss-120b" src="https://raw.githubusercontent.com/openai/gpt-oss/main/docs/gpt...
['Function Calling', 'Fast Inference', 'Multi-turn']
cerebras/GLM-4.5-Air-REAP-82B-A12B
GLM-4.5-Air-REAP-82B-A12B
cerebras
2025-10-20T15:44:06+00:00
1,159
77
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'glm', 'MOE', 'pruning', 'compression', 'conversational', 'en', 'arxiv:2510.13999', 'base_model:zai-org/GLM-4.5-Air', 'base_model:finetune:zai-org/GLM-4.5-Air', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
41
969
Glm4MoeForCausalLM
glm4_moe
9,881,780,224
131,072
4,096
96
46
151,552
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Creative Writing']
{}
70
Medium
0.7
2025-10-29T02:31:08.581588
language: - en library_name: transformers tags: - glm - MOE - pruning - compression license: mit name: cerebras/GLM-4.5-Air-REAP-82B-A12B description: > This model was obtained by uniformly pruning 25...
['Function Calling', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
ibm-granite/granite-4.0-h-1b
granite-4.0-h-1b
ibm-granite
2025-10-07T20:21:46+00:00
36
34
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'conversational', 'arxiv:0000.00000', 'base_model:ibm-granite/granite-4.0-h-1b-base', 'base_model:finetune:ibm-granite/granite-4.0-h-1b-base', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
34
970
GraniteMoeHybridForCausalLM
granitemoehybrid
1,286,602,752
131,072
1,536
12
40
100,352
Language Model
['Fine-tuned', 'RLHF', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:08.893758
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 base_model: - ibm-granite/granite-4.0-h-1b-base **Model Summary:** Granite-4.0-H-1B is a lightweight instruct model finetu...
['RAG Support', 'Long Context', 'Fast Inference', 'Safety Aligned']
RUC-DataLab/DeepAnalyze-8B
DeepAnalyze-8B
RUC-DataLab
2025-10-17T09:15:30+00:00
919
44
transformers
['transformers', 'safetensors', 'text-generation', 'dataset:RUC-DataLab/DataScience-Instruct-500K', 'arxiv:2510.16872', 'license:mit', 'endpoints_compatible', 'region:us']
34
971
Unknown
unknown
null
null
null
null
null
null
Language Model
[]
['text-generation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research']
{}
40
Critical
0.4
2025-10-29T02:31:12.244229
datasets: - RUC-DataLab/DataScience-Instruct-500K license: mit pipeline_tag: text-generation library_name: transformers <p align="center" width="100%"> <img src="assets/logo.png" alt="DeepAnalyze" sty...
[]
facebook/MobileLLM-Pro
MobileLLM-Pro
facebook
2025-09-10T18:40:06+00:00
4,010
135
transformers
['transformers', 'safetensors', 'llama4_text', 'text-generation', 'facebook', 'meta', 'pytorch', 'conversational', 'custom_code', 'en', 'base_model:facebook/MobileLLM-Pro-base', 'base_model:finetune:facebook/MobileLLM-Pro-base', 'license:fair-noncommercial-research-license', 'autotrain_compatible', 'region:us']
32
972
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:31:13.557107
No README available
[]
cerebras/GLM-4.6-REAP-218B-A32B-FP8
GLM-4.6-REAP-218B-A32B-FP8
cerebras
2025-10-23T20:29:59+00:00
365
32
transformers
['transformers', 'safetensors', 'glm4_moe', 'text-generation', 'glm', 'MOE', 'pruning', 'compression', 'conversational', 'en', 'arxiv:2510.13999', 'base_model:zai-org/GLM-4.6-FP8', 'base_model:quantized:zai-org/GLM-4.6-FP8', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'compressed-tensors', 'region:us']
32
973
Glm4MoeForCausalLM
glm4_moe
29,716,643,840
202,752
5,120
96
92
151,552
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'reasoning']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Creative Writing']
{}
60
Hard
0.6
2025-10-29T02:31:13.887930
language: - en library_name: transformers tags: - glm - MOE - pruning - compression license: mit name: cerebras/GLM-4.6-REAP-218B-A32B-FP8 description: > This model was obtained by uniformly pruning 4...
['Function Calling', 'Fast Inference', 'Memory Efficient', 'Multi-turn']
katanemo/Arch-Router-1.5B
Arch-Router-1.5B
katanemo
2025-05-30T18:16:23+00:00
2,818
211
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'routing', 'preference', 'arxiv:2506.16655', 'llm', 'conversational', 'en', 'base_model:Qwen/Qwen2.5-1.5B-Instruct', 'base_model:finetune:Qwen/Qwen2.5-1.5B-Instruct', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
30
974
Qwen2ForCausalLM
qwen2
1,026,097,152
32,768
1,536
12
28
151,936
Language Model
['Specialized']
['text-generation', 'question-answering', 'translation', 'summarization', 'code-generation', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Healthcare', 'Legal']
{}
83
Medium
0.83
2025-10-29T02:31:14.129528
base_model: - Qwen/Qwen2.5-1.5B-Instruct language: - en library_name: transformers license: other license_name: katanemo-research license_link: https://huggingface.co/katanemo/Arch-Router-1.5B/blob/ma...
['Fast Inference', 'Memory Efficient', 'Multi-turn', 'Safety Aligned']
ibm-granite/granite-4.0-h-350m
granite-4.0-h-350m
ibm-granite
2025-10-07T20:23:17+00:00
802
28
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'conversational', 'arxiv:0000.00000', 'base_model:ibm-granite/granite-4.0-h-350m-base', 'base_model:finetune:ibm-granite/granite-4.0-h-350m-base', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
28
975
GraniteMoeHybridForCausalLM
granitemoehybrid
303,562,752
32,768
768
12
32
100,352
Language Model
['Fine-tuned', 'RLHF', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:14.403183
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 base_model: - ibm-granite/granite-4.0-h-350m-base **Model Summary:** Granite-4.0-H-350M is a lightweight instruct model fi...
['RAG Support', 'Long Context', 'Fast Inference', 'Safety Aligned']
inclusionAI/LLaDA2.0-mini-preview
LLaDA2.0-mini-preview
inclusionAI
2025-10-17T07:36:24+00:00
1,194
66
transformers
['transformers', 'safetensors', 'llada2_moe', 'text-generation', 'dllm', 'diffusion', 'llm', 'text_generation', 'conversational', 'custom_code', 'license:apache-2.0', 'autotrain_compatible', 'region:us']
26
976
LLaDA2MoeModelLM
llada2_moe
1,328,545,792
8,192
2,048
16
20
157,184
Language Model
['Fine-tuned', 'RLHF']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Korean', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Education']
{}
70
Medium
0.7
2025-10-29T02:31:14.641519
license: apache-2.0 library_name: transformers tags: - dllm - diffusion - llm - text_generation **LLaDA2.0-mini-preview** is a diffusion language model featuring a 16BA1B Mixture-of-Experts (MoE) arch...
['Function Calling', 'RAG Support', 'Fast Inference', 'Memory Efficient']
ibm-granite/granite-4.0-350m
granite-4.0-350m
ibm-granite
2025-10-07T20:25:05+00:00
98
25
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'conversational', 'arxiv:0000.00000', 'base_model:ibm-granite/granite-4.0-350m-base', 'base_model:finetune:ibm-granite/granite-4.0-350m-base', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
25
977
GraniteMoeHybridForCausalLM
granitemoehybrid
455,081,984
32,768
1,024
16
28
100,352
Language Model
['Fine-tuned', 'RLHF', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:14.900303
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 base_model: - ibm-granite/granite-4.0-350m-base **Model Summary:** Granite-4.0-350M is a lightweight instruct model finetu...
['RAG Support', 'Long Context', 'Fast Inference', 'Safety Aligned']
inclusionAI/Ling-1T
Ling-1T
inclusionAI
2025-10-02T13:41:55+00:00
4,196
493
transformers
['transformers', 'safetensors', 'bailing_moe', 'text-generation', 'conversational', 'custom_code', 'arxiv:2507.17702', 'arxiv:2507.17634', 'license:mit', 'autotrain_compatible', 'region:us']
24
978
BailingMoeV2ForCausalLM
bailing_moe
65,712,160,768
32,768
8,192
64
80
157,184
Language Model
['RLHF', 'Merged']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Education', 'Legal']
{}
63
Hard
0.63
2025-10-29T02:31:15.127568
license: mit pipeline_tag: text-generation library_name: transformers <p align="center"> <img src="https://mdn.alipayobjects.com/huamei_qa8qxu/afts/img/A*4QxcQrBlTiAAAAAAQXAAAAgAemJ7AQ/original" width...
['Function Calling', 'Long Context', 'Fast Inference', 'Multi-turn']
ibm-granite/granite-4.0-1b
granite-4.0-1b
ibm-granite
2025-10-07T20:24:27+00:00
63
24
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'conversational', 'arxiv:0000.00000', 'base_model:ibm-granite/granite-4.0-1b-base', 'base_model:finetune:ibm-granite/granite-4.0-1b-base', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
24
979
GraniteMoeHybridForCausalLM
granitemoehybrid
2,218,786,816
131,072
2,048
16
40
100,352
Language Model
['Fine-tuned', 'RLHF', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:15.507767
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 base_model: - ibm-granite/granite-4.0-1b-base **Model Summary:** Granite-4.0-1B is a lightweight instruct model finetuned ...
['RAG Support', 'Long Context', 'Fast Inference', 'Safety Aligned']
utter-project/EuroLLM-9B
EuroLLM-9B
utter-project
2024-11-22T10:44:55+00:00
7,084
114
transformers
['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'en', 'de', 'es', 'fr', 'it', 'pt', 'pl', 'nl', 'tr', 'sv', 'cs', 'el', 'hu', 'ro', 'fi', 'uk', 'sl', 'sk', 'da', 'lt', 'lv', 'et', 'bg', 'no', 'ca', 'hr', 'ga', 'mt', 'gl', 'zh', 'ru', 'ko', 'ja', 'ar', 'hi', 'arxiv:2202.03799', 'arxiv:2402.17733', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
23
980
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:31:17.187940
No README available
[]
deepseek-ai/DeepSeek-R1
DeepSeek-R1
deepseek-ai
2025-01-20T03:46:07+00:00
462,277
12,814
transformers
['transformers', 'safetensors', 'deepseek_v3', 'text-generation', 'conversational', 'custom_code', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'fp8', 'region:us']
23
981
DeepseekV3ForCausalLM
deepseek_v3
38,537,003,008
163,840
7,168
128
61
129,280
Language Model
['Fine-tuned', 'RLHF', 'Merged']
['text-generation', 'question-answering', 'summarization', 'reasoning', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Business']
{}
48
Hard
0.48
2025-10-29T02:31:19.877604
license: mit library_name: transformers <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <!-- markdownlint-disable no-duplicate-header --> <div align="center"> <img src="...
['RAG Support', 'Long Context']
rednote-hilab/dots.ocr
dots.ocr
rednote-hilab
2025-07-30T09:55:44+00:00
1,170,388
1,098
dots_ocr
['dots_ocr', 'safetensors', 'text-generation', 'image-to-text', 'ocr', 'document-parse', 'layout', 'table', 'formula', 'transformers', 'custom_code', 'image-text-to-text', 'conversational', 'en', 'zh', 'multilingual', 'license:mit', 'region:us']
23
982
DotsOCRForCausalLM
dots_ocr
1,026,097,152
131,072
1,536
12
28
151,936
Language Model
[]
['text-generation', 'translation', 'summarization', 'conversation']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Production', 'Finance']
{}
75
Medium
0.75
2025-10-29T02:31:20.300963
license: mit library_name: dots_ocr pipeline_tag: image-text-to-text tags: - image-to-text - ocr - document-parse - layout - table - formula - transformers - custom_code language: - en - zh - multilin...
['RAG Support', 'Fast Inference', 'Memory Efficient']
ibm-granite/granite-4.0-h-1b-base
granite-4.0-h-1b-base
ibm-granite
2025-10-07T20:22:43+00:00
33
23
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
23
983
GraniteMoeHybridForCausalLM
granitemoehybrid
1,286,602,752
131,072
1,536
12
40
100,352
Language Model
['LoRA', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:20.511873
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 **Model Summary:** Granite-4.0-H-1B-Base is a lightweight decoder-only language model designed for scenarios where efficie...
['Long Context', 'Fast Inference', 'Safety Aligned']
moonshotai/Kimi-K2-Instruct-0905
Kimi-K2-Instruct-0905
moonshotai
2025-09-03T03:34:36+00:00
41,884
523
transformers
['transformers', 'safetensors', 'kimi_k2', 'text-generation', 'conversational', 'custom_code', 'license:other', 'autotrain_compatible', 'endpoints_compatible', 'fp8', 'region:us']
22
984
DeepseekV3ForCausalLM
kimi_k2
38,784,729,088
262,144
7,168
64
61
163,840
Language Model
[]
['question-answering', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
48
Hard
0.48
2025-10-29T02:31:20.756473
license: other license_name: modified-mit library_name: transformers <div align="center"> <picture> <img src="figures/kimi-logo.png" width="30%" alt="Kimi K2: Open Agentic Intelligence"> </picture> </...
['Long Context']
inclusionAI/Ring-1T
Ring-1T
inclusionAI
2025-10-10T16:39:04+00:00
1,552
212
transformers
['transformers', 'safetensors', 'bailing_moe', 'text-generation', 'conversational', 'custom_code', 'arxiv:2510.18855', 'license:mit', 'autotrain_compatible', 'region:us']
22
985
BailingMoeV2ForCausalLM
bailing_moe
65,712,160,768
65,536
8,192
64
80
157,184
Language Model
['RLHF']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Education', 'Creative Writing', 'Business', 'Healthcare']
{}
63
Hard
0.63
2025-10-29T02:31:21.333297
pipeline_tag: text-generation license: mit library_name: transformers <p align="center"> <img src="https://mdn.alipayobjects.com/huamei_qa8qxu/afts/img/A*4QxcQrBlTiAAAAAAQXAAAAgAemJ7AQ/original" width...
['Function Calling', 'RAG Support', 'Long Context', 'Fast Inference', 'Memory Efficient']
ibm-granite/granite-4.0-1b-base
granite-4.0-1b-base
ibm-granite
2025-10-07T20:24:47+00:00
31
21
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
21
986
GraniteMoeHybridForCausalLM
granitemoehybrid
2,218,786,816
131,072
2,048
16
40
100,352
Language Model
['LoRA', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:21.734530
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 **Model Summary:** Granite-4.0-1B-Base is a lightweight decoder-only language model designed for scenarios where efficienc...
['Long Context', 'Fast Inference', 'Safety Aligned']
vandijklab/C2S-Scale-Gemma-2-27B
C2S-Scale-Gemma-2-27B
vandijklab
2025-10-06T20:22:30+00:00
9,405
134
transformers
['transformers', 'safetensors', 'gemma2', 'text-generation', 'biology', 'scRNAseq', 'genomics', 'computational-biology', 'bioinformatics', 'gene-expression', 'cell-biology', 'pytorch', 'cell-type-annotation', 'Question Answering', 'en', 'base_model:google/gemma-2-27b', 'base_model:finetune:google/gemma-2-27b', 'license:cc-by-4.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
20
987
Gemma2ForCausalLM
gemma2
12,900,630,528
8,192
4,608
32
46
256,000
Language Model
['Fine-tuned', 'RLHF', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'reasoning']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education', 'Legal']
{}
60
Hard
0.6
2025-10-29T02:31:21.961927
license: cc-by-4.0 language: - en base_model: google/gemma-2-27b library_name: transformers pipeline_tag: text-generation tags: - biology - scRNAseq - gemma2 - genomics - computational-biology - bioin...
['RAG Support', 'Fast Inference']
meta-llama/Llama-3.2-1B
Llama-3.2-1B
meta-llama
2024-09-18T15:03:14+00:00
1,688,906
2,138
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'en', 'de', 'fr', 'it', 'pt', 'hi', 'es', 'th', 'arxiv:2204.05149', 'arxiv:2405.16406', 'license:llama3.2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
19
988
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:31:23.251057
No README available
[]
Qwen/Qwen3-Embedding-0.6B
Qwen3-Embedding-0.6B
Qwen
2025-06-03T14:25:32+00:00
4,127,926
691
sentence-transformers
['sentence-transformers', 'safetensors', 'qwen3', 'text-generation', 'transformers', 'sentence-similarity', 'feature-extraction', 'text-embeddings-inference', 'arxiv:2506.05176', 'base_model:Qwen/Qwen3-0.6B-Base', 'base_model:finetune:Qwen/Qwen3-0.6B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
19
989
Qwen3ForCausalLM
qwen3
507,630,592
32,768
1,024
16
28
151,669
Language Model
[]
['text-classification', 'code-generation', 'reasoning']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['General Purpose']
{}
81
Medium
0.81
2025-10-29T02:31:23.549329
license: apache-2.0 base_model: - Qwen/Qwen3-0.6B-Base tags: - transformers - sentence-transformers - sentence-similarity - feature-extraction - text-embeddings-inference <p align="center"> <img src="...
['Long Context', 'Safety Aligned']
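This record's library is sentence-transformers and its tags include sentence-similarity and feature-extraction, so the model is meant as a text embedder rather than a generator. A minimal similarity sketch is below; the example texts are invented, and the call pattern is the generic sentence-transformers one rather than anything specific to this release.

```python
# Minimal sketch: use the embedding record above for sentence similarity.
# Assumes `pip install sentence-transformers`; example texts are invented.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Qwen/Qwen3-Embedding-0.6B")

queries = ["How do I merge two language models?"]
docs = [
    "Model merging combines the weights of several fine-tuned checkpoints.",
    "The recipe calls for two cups of flour and one egg.",
]

query_emb = model.encode(queries, normalize_embeddings=True)
doc_emb = model.encode(docs, normalize_embeddings=True)

# With normalized embeddings, the dot product equals cosine similarity.
scores = query_emb @ doc_emb.T
print(scores)  # higher score -> more semantically similar
```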
ibm-granite/granite-4.0-h-350m-base
granite-4.0-h-350m-base
ibm-granite
2025-10-07T20:24:04+00:00
20
19
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
19
990
GraniteMoeHybridForCausalLM
granitemoehybrid
303,562,752
32,768
768
12
32
100,352
Language Model
['LoRA', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:23.837092
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 **Model Summary:** Granite-4.0-H-350M-Base is a lightweight decoder-only language model designed for scenarios where effic...
['Long Context', 'Fast Inference', 'Safety Aligned']
inclusionAI/Ring-flash-linear-2.0-128k
Ring-flash-linear-2.0-128k
inclusionAI
2025-10-21T02:57:09+00:00
62
19
transformers
['transformers', 'safetensors', 'bailing_moe_linear', 'text-generation', 'moe', 'conversational', 'custom_code', 'en', 'arxiv:2510.19338', 'base_model:inclusionAI/Ling-flash-base-2.0', 'base_model:finetune:inclusionAI/Ling-flash-base-2.0', 'license:mit', 'autotrain_compatible', 'region:us']
19
991
BailingMoeLinearV2ForCausalLM
bailing_moe_linear
7,086,276,608
131,072
4,096
32
32
157,184
Language Model
['Merged']
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production']
{}
78
Medium
0.78
2025-10-29T02:31:26.078247
license: mit language: - en base_model: - inclusionAI/Ling-flash-base-2.0 pipeline_tag: text-generation library_name: transformers tags: - moe <p align="center"> <img src="https://mdn.alipayobjects.co...
['RAG Support', 'Long Context', 'Fast Inference']
ibm-granite/granite-docling-258M
granite-docling-258M
ibm-granite
2025-05-19T19:53:11+00:00
229,742
990
transformers
['transformers', 'safetensors', 'idefics3', 'image-to-text', 'text-generation', 'documents', 'code', 'formula', 'chart', 'ocr', 'layout', 'table', 'document-parse', 'docling', 'granite', 'extraction', 'math', 'image-text-to-text', 'conversational', 'en', 'dataset:ds4sd/SynthCodeNet', 'dataset:ds4sd/SynthFormulaNet', 'dataset:ds4sd/SynthChartNet', 'dataset:HuggingFaceM4/DoclingMatix', 'arxiv:2501.17887', 'arxiv:2503.11576', 'arxiv:2305.03393', 'license:apache-2.0', 'endpoints_compatible', 'region:us']
18
992
Idefics3ForConditionalGeneration
idefics3
null
null
null
null
null
100,352
Language Model
['Fine-tuned', 'LoRA']
['text-generation', 'question-answering', 'summarization', 'code-generation', 'reasoning', 'conversation']
['English', 'Chinese', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Education']
{}
48
Hard
0.48
2025-10-29T02:31:26.308734
license: apache-2.0 datasets: - ds4sd/SynthCodeNet - ds4sd/SynthFormulaNet - ds4sd/SynthChartNet - HuggingFaceM4/DoclingMatix tags: - text-generation - documents - code - formula - chart - ocr - layou...
['Fast Inference', 'Memory Efficient', 'Safety Aligned']
Qwen/Qwen3-4B-Instruct-2507
Qwen3-4B-Instruct-2507
Qwen
2025-08-05T10:58:03+00:00
3,623,413
427
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2505.09388', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
18
993
Qwen3ForCausalLM
qwen3
3,220,111,360
262,144
2,560
32
36
151,936
Language Model
[]
['text-generation', 'question-answering', 'code-generation', 'reasoning', 'conversation']
['English', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
68
Medium
0.68
2025-10-29T02:31:26.621674
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-4B-Instruct-2507/blob/main/LICENSE pipeline_tag: text-generation <a href="https://chat.qwen.ai" target="_...
['RAG Support', 'Safety Aligned']
Alibaba-NLP/Tongyi-DeepResearch-30B-A3B
Tongyi-DeepResearch-30B-A3B
Alibaba-NLP
2025-09-16T06:56:49+00:00
15,866
709
transformers
['transformers', 'safetensors', 'qwen3_moe', 'text-generation', 'conversational', 'en', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
18
994
Qwen3MoeForCausalLM
qwen3_moe
2,727,084,032
131,072
2,048
32
48
151,936
Language Model
['RLHF']
['text-generation', 'question-answering', 'reasoning']
['English', 'Chinese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Research', 'Production', 'Education']
{}
70
Medium
0.7
2025-10-29T02:31:26.843613
license: apache-2.0 language: - en pipeline_tag: text-generation library_name: transformers We present **Tongyi DeepResearch**, an agentic large language model featuring 30 billion total parameters, ...
['RAG Support']
ibm-granite/granite-4.0-350m-base
granite-4.0-350m-base
ibm-granite
2025-10-07T20:25:24+00:00
19
18
transformers
['transformers', 'safetensors', 'granitemoehybrid', 'text-generation', 'language', 'granite-4.0', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
18
995
GraniteMoeHybridForCausalLM
granitemoehybrid
455,081,984
32,768
1,024
16
28
100,352
Language Model
['LoRA', 'Specialized']
['text-generation', 'question-answering', 'text-classification', 'summarization']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Research', 'Education']
{}
86
Easy
0.86
2025-10-29T02:31:27.181589
license: apache-2.0 library_name: transformers tags: - language - granite-4.0 **Model Summary:** Granite-4.0-350M-Base is a lightweight decoder-only language model designed for scenarios where efficie...
['Long Context', 'Fast Inference', 'Safety Aligned']
Qwen/Qwen3-0.6B
Qwen3-0.6B
Qwen
2025-04-27T03:40:08+00:00
7,303,549
739
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-0.6B-Base', 'base_model:finetune:Qwen/Qwen3-0.6B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
17
996
Qwen3ForCausalLM
qwen3
507,904,000
40,960
1,024
16
28
151,936
Language Model
[]
['text-generation', 'question-answering', 'translation', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
78
Medium
0.78
2025-10-29T02:31:27.464701
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-0.6B/blob/main/LICENSE pipeline_tag: text-generation base_model: - Qwen/Qwen3-0.6B-Base <a href="https://...
['Fast Inference', 'Multi-turn', 'Safety Aligned']
inference-net/Schematron-3B
Schematron-3B
inference-net
2025-08-21T19:15:21+00:00
1,927,465
92
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'base_model:meta-llama/Llama-3.2-3B-Instruct', 'base_model:finetune:meta-llama/Llama-3.2-3B-Instruct', 'license:llama3.2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
17
997
LlamaForCausalLM
llama
3,564,896,256
131,072
3,072
24
28
128,256
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic']
['Production', 'Legal']
{}
86
Easy
0.86
2025-10-29T02:31:27.807187
library_name: transformers license: llama3.2 base_model: meta-llama/Llama-3.2-3B-Instruct <p align="center"> <img alt="Schematron" src="https://huggingface.co/inference-net/Schematron-3B/resolve/main/...
['Long Context', 'Fast Inference', 'Safety Aligned']
Qwen/Qwen3-8B
Qwen3-8B
Qwen
2025-04-27T03:42:21+00:00
2,190,201
703
transformers
['transformers', 'safetensors', 'qwen3', 'text-generation', 'conversational', 'arxiv:2309.00071', 'arxiv:2505.09388', 'base_model:Qwen/Qwen3-8B-Base', 'base_model:finetune:Qwen/Qwen3-8B-Base', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
16
998
Qwen3ForCausalLM
qwen3
7,870,087,168
40,960
4,096
32
36
151,936
Language Model
[]
['text-generation', 'question-answering', 'translation', 'code-generation', 'reasoning', 'conversation']
['English', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production', 'Creative Writing']
{}
81
Medium
0.81
2025-10-29T02:31:28.268366
library_name: transformers license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen3-8B/blob/main/LICENSE pipeline_tag: text-generation base_model: - Qwen/Qwen3-8B-Base <a href="https://chat...
['RAG Support', 'Long Context', 'Fast Inference', 'Multi-turn', 'Safety Aligned']
Qwen/Qwen2.5-7B-Instruct
Qwen2.5-7B-Instruct
Qwen
2024-09-16T11:55:40+00:00
7,844,801
839
transformers
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-7B', 'base_model:finetune:Qwen/Qwen2.5-7B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
15
999
Qwen2ForCausalLM
qwen2
4,860,936,192
32,768
3,584
28
28
152,064
Language Model
['Specialized']
['text-generation', 'question-answering', 'code-generation', 'conversation']
['English', 'Chinese', 'Korean', 'Japanese', 'Spanish', 'French', 'German', 'Russian', 'Arabic', 'Multilingual']
['Production']
{}
86
Easy
0.86
2025-10-29T02:31:28.533122
license: apache-2.0 license_link: https://huggingface.co/Qwen/Qwen2.5-7B-Instruct/blob/main/LICENSE language: - en pipeline_tag: text-generation base_model: Qwen/Qwen2.5-7B tags: - chat library_name: ...
['Long Context', 'Fast Inference', 'Safety Aligned']
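The architecture columns repeated in every record (architecture, model_type, hidden_size, num_attention_heads, num_hidden_layers, vocab_size, max_position_embeddings) correspond to fields in a model's config.json. The sketch below reads them back with transformers' AutoConfig for the Qwen2.5-7B-Instruct record above; it only shows where these raw values come from, not how the table's derived scores were computed.

```python
# Minimal sketch: read the config-derived columns of a record
# (architecture, hidden size, heads, layers, vocab, context length).
# Assumes `transformers` is installed; only the small config.json is fetched.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("Qwen/Qwen2.5-7B-Instruct")

row = {
    "architecture": config.architectures[0],                     # "Qwen2ForCausalLM"
    "model_type": config.model_type,                             # "qwen2"
    "hidden_size": config.hidden_size,                           # 3584
    "num_attention_heads": config.num_attention_heads,           # 28
    "num_hidden_layers": config.num_hidden_layers,               # 28
    "vocab_size": config.vocab_size,                             # 152064
    "max_position_embeddings": config.max_position_embeddings,   # 32768
}
for key, value in row.items():
    print(f"{key}: {value}")
```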
meta-llama/Llama-3.2-3B-Instruct
Llama-3.2-3B-Instruct
meta-llama
2024-09-18T15:19:20+00:00
1,901,332
1,784
transformers
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'conversational', 'en', 'de', 'fr', 'it', 'pt', 'hi', 'es', 'th', 'arxiv:2204.05149', 'arxiv:2405.16406', 'license:llama3.2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
15
1,000
Unknown
unknown
null
null
null
null
null
null
General Language Model
[]
['text-generation']
['English']
['General Purpose']
{}
40
Critical
0.4
2025-10-29T02:31:29.827396
No README available
[]