OPEA /

Safetensors · qwen2_vl · 2-bit · auto-round
cicdatopea committed on
Commit df353d8 · verified · 1 parent: b9b2297

Upload config.json with huggingface_hub

Files changed (1)
config.json +2 −2
config.json CHANGED
@@ -35,7 +35,7 @@
   "lr": 0.0005,
   "minmax_lr": 0.0005,
   "nsamples": 1024,
-  "quant_method": "gptq",
+  "quant_method": "auto-round",
   "scale_dtype": "torch.float16",
   "seqlen": 2048,
   "sym": true,
@@ -55,7 +55,7 @@
   "rope_theta": 1000000.0,
   "sliding_window": 32768,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.52.2",
   "use_cache": true,
   "use_sliding_window": false,