danielhanchen committed (verified)
Commit d2f2dd0 · 1 Parent(s): 05ad3fd

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
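Note: the new rule routes the (now large) tokenizer.json through Git LFS. A quick sanity check from a local clone, sketched in Python (assumes git and git-lfs are installed and on PATH):

import subprocess

# Confirm the new .gitattributes rule resolves for tokenizer.json.
out = subprocess.run(
    ["git", "check-attr", "filter", "tokenizer.json"],
    capture_output=True, text=True, check=True,
)
print(out.stdout.strip())  # expected: tokenizer.json: filter: lfs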
added_tokens.json CHANGED
@@ -1,7 +1,6 @@
 {
   "</tool_call>": 151658,
   "<tool_call>": 151657,
-  "<|PAD_TOKEN|>": 151665,
   "<|box_end|>": 151649,
   "<|box_start|>": 151648,
   "<|endoftext|>": 151643,
config.json CHANGED
@@ -4,7 +4,6 @@
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 151643,
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 1536,
@@ -16,7 +15,7 @@
   "num_attention_heads": 12,
   "num_hidden_layers": 28,
   "num_key_value_heads": 2,
-  "pad_token_id": 151665,
+  "pad_token_id": 151654,
   "quantization_config": {
     "_load_in_4bit": true,
     "_load_in_8bit": false,
@@ -26,18 +25,24 @@
     "bnb_4bit_use_double_quant": true,
     "llm_int8_enable_fp32_cpu_offload": false,
     "llm_int8_has_fp16_weight": false,
-    "llm_int8_skip_modules": null,
+    "llm_int8_skip_modules": [
+      "lm_head",
+      "multi_modal_projector",
+      "merger",
+      "modality_projection"
+    ],
     "llm_int8_threshold": 6.0,
     "load_in_4bit": true,
     "load_in_8bit": false,
     "quant_method": "bitsandbytes"
   },
   "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
   "rope_theta": 1000000.0,
   "sliding_window": null,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.49.0.dev0",
   "unsloth_fixed": true,
   "use_cache": true,
   "use_sliding_window": false,
generation_config.json CHANGED
@@ -6,10 +6,10 @@
     151643
   ],
   "max_length": 32768,
-  "pad_token_id": 151665,
+  "pad_token_id": 151654,
   "repetition_penalty": 1.1,
   "temperature": 0.7,
   "top_k": 20,
   "top_p": 0.8,
-  "transformers_version": "4.44.2"
+  "transformers_version": "4.49.0.dev0"
 }
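Note: the same pad-token and transformers-version bumps land in the decoding defaults. For reference, the resulting setup written out as a GenerationConfig; do_sample is an assumption, since it sits outside the rendered hunk.

from transformers import GenerationConfig

# GenerationConfig.from_pretrained(repo_id) would reproduce these values
# straight from the committed generation_config.json.
gen = GenerationConfig(
    max_length=32768,
    pad_token_id=151654,
    repetition_penalty=1.1,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
    do_sample=True,  # assumption: needed for temperature/top_k/top_p to apply
)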
special_tokens_map.json CHANGED
@@ -22,7 +22,7 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "<|PAD_TOKEN|>",
+    "content": "<|vision_pad|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -177,14 +177,6 @@
       "rstrip": false,
       "single_word": false,
       "special": false
-    },
-    "151665": {
-      "content": "<|PAD_TOKEN|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
@@ -207,8 +199,9 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "errors": "replace",
-  "model_max_length": 131072,
-  "pad_token": "<|PAD_TOKEN|>",
+  "extra_special_tokens": {},
+  "model_max_length": 32768,
+  "pad_token": "<|vision_pad|>",
   "padding_side": "left",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",