```json
{
  "architectures": [
    "MiniCPMV"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_minicpm.MiniCPMVConfig",
    "AutoModel": "modeling_minicpmv.MiniCPMV",
    "AutoModelForCausalLM": "modeling_minicpmv.MiniCPMV"
  },
  "batch_vision_input": true,
  "bos_token_id": 1,
  "drop_vision_last_layer": false,
  "eos_token_id": [
    2,
    73440
  ],
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 2560,
  "image_size": 448,
  "initializer_range": 0.1,
  "intermediate_size": 10240,
  "max_position_embeddings": 32768,
  "mlp_bias": false,
  "model_type": "minicpmv",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 2,
  "pad_token_id": 2,
  "patch_size": 14,
  "pretraining_tp": 1,
  "query_num": 64,
  "rms_norm_eps": 1e-06,
  "rope_scaling": {
    "factor": 1.0,
    "long_factor": [
      0.9977997200264581,
      1.014658295992452,
      1.0349680404997148,
      1.059429246056193,
      1.0888815016813513,
      1.1243301355211495,
      1.166977103606075,
      1.2182568066927284,
      1.2798772354275727,
      1.3538666751582975,
      1.4426259039919596,
      1.5489853358570191,
      1.6762658237220625,
      1.8283407612492941,
      2.0096956085876183,
      2.225478927469756,
      2.481536379650452,
      2.784415934557119,
      3.1413289096347365,
      3.560047844772632,
      4.048719380066383,
      4.615569542115128,
      5.2684819496549835,
      6.014438591970396,
      6.858830049237097,
      7.804668263503327,
      8.851768731513417,
      9.99600492938444,
      11.228766118181639,
      12.536757560834843,
      13.902257701387796,
      15.303885189125953,
      16.717837610115794,
      18.119465097853947,
      19.484965238406907,
      20.792956681060105,
      22.02571786985731,
      23.16995406772833,
      24.217054535738416,
      25.16289275000465,
      26.007284207271347,
      26.753240849586767,
      27.40615325712662,
      27.973003419175363,
      28.461674954469114,
      28.880393889607006,
      29.237306864684626,
      29.540186419591297,
      29.79624387177199,
      30.01202719065413,
      30.193382037992453,
      30.34545697551969,
      30.47273746338473,
      30.579096895249787,
      30.66785612408345,
      30.741845563814174,
      30.80346599254902,
      30.85474569563567,
      30.897392663720595,
      30.932841297560394,
      30.962293553185553,
      30.986754758742034,
      31.007064503249293,
      31.02392307921529
    ],
| "original_max_position_embeddings": 32786, | |
| "rope_type": "longrope", | |
| "short_factor": [ | |
| 0.9977997200264581, | |
| 1.014658295992452, | |
| 1.0349680404997148, | |
| 1.059429246056193, | |
| 1.0888815016813513, | |
| 1.1243301355211495, | |
| 1.166977103606075, | |
| 1.2182568066927284, | |
| 1.2798772354275727, | |
| 1.3538666751582975, | |
| 1.4426259039919596, | |
| 1.5489853358570191, | |
| 1.6762658237220625, | |
| 1.8283407612492941, | |
| 2.0096956085876183, | |
| 2.225478927469756, | |
| 2.481536379650452, | |
| 2.784415934557119, | |
| 3.1413289096347365, | |
| 3.560047844772632, | |
| 4.048719380066383, | |
| 4.615569542115128, | |
| 5.2684819496549835, | |
| 6.014438591970396, | |
| 6.858830049237097, | |
| 7.804668263503327, | |
| 8.851768731513417, | |
| 9.99600492938444, | |
| 11.228766118181639, | |
| 12.536757560834843, | |
| 13.902257701387796, | |
| 15.303885189125953, | |
| 16.717837610115794, | |
| 18.119465097853947, | |
| 19.484965238406907, | |
| 20.792956681060105, | |
| 22.02571786985731, | |
| 23.16995406772833, | |
| 24.217054535738416, | |
| 25.16289275000465, | |
| 26.007284207271347, | |
| 26.753240849586767, | |
| 27.40615325712662, | |
| 27.973003419175363, | |
| 28.461674954469114, | |
| 28.880393889607006, | |
| 29.237306864684626, | |
| 29.540186419591297, | |
| 29.79624387177199, | |
| 30.01202719065413, | |
| 30.193382037992453, | |
| 30.34545697551969, | |
| 30.47273746338473, | |
| 30.579096895249787, | |
| 30.66785612408345, | |
| 30.741845563814174, | |
| 30.80346599254902, | |
| 30.85474569563567, | |
| 30.897392663720595, | |
| 30.932841297560394, | |
| 30.962293553185553, | |
| 30.986754758742034, | |
| 31.007064503249293, | |
| 31.02392307921529 | |
| ] | |
| }, | |
| "rope_theta": 10000.0, | |
| "slice_config": { | |
| "max_slice_nums": 9, | |
| "model_type": "minicpmv", | |
| "patch_size": 14, | |
| "scale_resolution": 448 | |
| }, | |
| "slice_mode": true, | |
| "tie_word_embeddings": false, | |
| "torch_dtype": "bfloat16", | |
| "transformers_version": "4.51.0", | |
| "use_cache": true, | |
| "use_image_id": true, | |
| "version": 4.0, | |
| "vision_batch_size": 16, | |
| "vision_config": { | |
| "_attn_implementation_autoset": true, | |
| "attention_dropout": 0.0, | |
| "hidden_act": "gelu_pytorch_tanh", | |
| "hidden_size": 1152, | |
| "image_size": 980, | |
| "intermediate_size": 4304, | |
| "layer_norm_eps": 1e-06, | |
| "model_type": "siglip_vision_model", | |
| "num_attention_heads": 16, | |
| "num_channels": 3, | |
| "num_hidden_layers": 27, | |
| "patch_size": 14 | |
| }, | |
| "vocab_size": 73448 | |
| } | |
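
Because `auto_map` points at `configuration_minicpm.py` and `modeling_minicpmv.py` shipped alongside the checkpoint rather than at classes built into `transformers`, loading this config requires `trust_remote_code=True`. A minimal loading sketch follows; the repo id is an assumption, so substitute the actual checkpoint path or hub id you are using:

```python
# Minimal loading sketch. "openbmb/MiniCPM-V-4" is an ASSUMED repo id;
# replace it with the real checkpoint path. trust_remote_code=True is
# needed because auto_map resolves to Python files inside the checkpoint.
import torch
from transformers import AutoModel, AutoTokenizer

model_id = "openbmb/MiniCPM-V-4"  # assumption, not taken from the config

model = AutoModel.from_pretrained(
    model_id,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
).eval()
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
```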
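Several fields are tied together by simple arithmetic that is worth sanity-checking when editing the file: `num_attention_heads / num_key_value_heads` gives the grouped-query-attention ratio, the explicit `head_dim` decouples the query projection width from `hidden_size`, and the vision tower's `image_size / patch_size` grid is compressed to `query_num` tokens per slice (as I understand the MiniCPM-V resampler design, with `slice_config` allowing up to `max_slice_nums` slices at roughly `scale_resolution` each). A small illustrative script, assuming the JSON above is saved as `config.json`:

```python
# Illustrative arithmetic over the config values above; nothing here is an
# official API, it just makes the derived shapes explicit.
import json
from pathlib import Path

cfg = json.loads(Path("config.json").read_text())

# Grouped-query attention: 32 query heads share 2 KV heads -> 16 per group.
assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0
gqa_group = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 16

# head_dim is set explicitly, so Q projects to 32 * 128 = 4096 dims even
# though hidden_size is 2560.
q_dim = cfg["num_attention_heads"] * cfg["head_dim"]  # 4096

# Vision tower: a 980x980 input at patch_size 14 gives a 70x70 patch grid
# (4900 patches), which the resampler compresses to query_num = 64 tokens.
v = cfg["vision_config"]
grid = v["image_size"] // v["patch_size"]   # 70
patches = grid * grid                       # 4900
tokens_per_slice = cfg["query_num"]         # 64

print(gqa_group, q_dim, patches, tokens_per_slice)
```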