JohnConnor123 committed · Commit 867a0e5 · verified · 1 Parent(s): 81d1150

Upload MistralForCausalLM

Files changed (1): config.json (+2 -2)
config.json CHANGED
@@ -17,7 +17,7 @@
   "num_hidden_layers": 10,
   "num_key_value_heads": 3,
   "quantization_config": {
-    "_load_in_4bit": false,
+    "_load_in_4bit": true,
     "_load_in_8bit": false,
     "bnb_4bit_compute_dtype": "float32",
     "bnb_4bit_quant_storage": "uint8",
@@ -27,7 +27,7 @@
     "llm_int8_has_fp16_weight": false,
     "llm_int8_skip_modules": null,
     "llm_int8_threshold": 6.0,
-    "load_in_4bit": false,
+    "load_in_4bit": true,
     "load_in_8bit": false,
     "quant_method": "bitsandbytes"
   },
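
The two changed keys flip the bitsandbytes quantization config from full precision to 4-bit loading. As a minimal sketch (not part of the commit; the repository id below is a hypothetical placeholder, since the diff does not name the repo), a config.json with these values is typically produced by loading the checkpoint through transformers with a BitsAndBytesConfig and then pushing it back to the Hub:

# Hedged sketch: reproduces the quantization_config values seen in the diff above.
# "JohnConnor123/<model-repo>" is a placeholder; the actual repo id is not shown.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # serialized as "load_in_4bit": true
    bnb_4bit_compute_dtype=torch.float32,  # "bnb_4bit_compute_dtype": "float32"
    bnb_4bit_quant_storage=torch.uint8,    # "bnb_4bit_quant_storage": "uint8"
)

model = AutoModelForCausalLM.from_pretrained(
    "JohnConnor123/<model-repo>",          # hypothetical repo id
    quantization_config=bnb_config,
)

# push_to_hub re-serializes quantization_config into config.json, which is
# what an "Upload MistralForCausalLM" commit like this one records.
model.push_to_hub("JohnConnor123/<model-repo>")

Note that the underscore-prefixed "_load_in_4bit" key is an internal field that transformers keeps in sync with the public "load_in_4bit" flag, which is why both lines change together in the diff.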