kiennkt05 committed · verified
Commit 8ef2180 · 1 Parent(s): 8a0d29e

Upload 10 files

added_tokens.json CHANGED
@@ -1,7 +1,6 @@
 {
   "</tool_call>": 151658,
   "<tool_call>": 151657,
-  "<|PAD_TOKEN|>": 151665,
   "<|box_end|>": 151649,
   "<|box_start|>": 151648,
   "<|endoftext|>": 151643,
chat_template.jinja CHANGED
@@ -3,7 +3,7 @@
 {%- if messages[0]['role'] == 'system' %}
 {{- messages[0]['content'] }}
 {%- else %}
-{{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
+{{- 'You are a helpful assistant.' }}
 {%- endif %}
 {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
 {%- for tool in tools %}
@@ -15,7 +15,7 @@
 {%- if messages[0]['role'] == 'system' %}
 {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
 {%- else %}
-{{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
+{{- '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}
 {%- endif %}
 {%- endif %}
 {%- for message in messages %}
@@ -38,7 +38,8 @@
 {%- endfor %}
 {{- '<|im_end|>\n' }}
 {%- elif message.role == "tool" %}
-{%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %} {{- '<|im_start|>user' }}
+{%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+{{- '<|im_start|>user' }}
 {%- endif %}
 {{- '\n<tool_response>\n' }}
 {{- message.content }}
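Two things change in the template: the fallback system prompt loses the Qwen/Alibaba Cloud branding, and the tool-role branch is split onto two lines. The latter is cosmetic only, since the `{{-` whitespace-control marker strips the newline just as it previously stripped the space, so consecutive tool messages are still grouped under a single <|im_start|>user block. A rough sketch of how the new default system prompt shows up, assuming a local checkout path and a transformers version recent enough to pick up chat_template.jinja:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # assumed local checkout

# With no system message, the template now falls back to the plain default prompt.
messages = [{"role": "user", "content": "Hi"}]
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

assert text.startswith("<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n")
print(text)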
config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "hidden_act": "silu",
+  "hidden_size": 896,
+  "initializer_range": 0.02,
+  "intermediate_size": 4864,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 24,
+  "model_type": "qwen2",
+  "num_attention_heads": 14,
+  "num_hidden_layers": 24,
+  "num_key_value_heads": 2,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": 32768,
+  "tie_word_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers_version": "4.52.4",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}
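This matches the Qwen2.5-0.5B geometry (24 layers, hidden size 896, GQA with 2 KV heads, tied embeddings), saved in float32. A quick, hedged way to confirm the parameter count implied by the config without downloading weights (the local path is an assumption):

from transformers import AutoConfig, AutoModelForCausalLM

cfg = AutoConfig.from_pretrained("path/to/this/repo")  # assumed local checkout
model = AutoModelForCausalLM.from_config(cfg)          # random init, no weight download

n = model.num_parameters()
print(n, n * 4 / 1e9)  # ~0.49B params; at 4 bytes each that is ~2 GB,
                       # consistent with the model.safetensors size further down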
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "max_new_tokens": 2048,
+  "transformers_version": "4.52.4"
+}
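These values act as the defaults that generate() falls back to when no explicit arguments are passed: decoding stops at <|endoftext|> (151643) or after 2048 new tokens. A hedged usage sketch, with the repo path again assumed:

from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "path/to/this/repo"  # assumed local checkout
tok = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(model_path)

inputs = tok("Hello, ", return_tensors="pt")
# No length or eos arguments: generate() falls back to generation_config.json,
# i.e. eos_token_id=151643 and max_new_tokens=2048.
out = model.generate(**inputs)
print(tok.decode(out[0], skip_special_tokens=True))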
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af6b6761a2b30b1c504c4ca25bdfa4ba236cefd6883c4cf8e6e239b4b94d0def
+size 1976163472
special_tokens_map.json CHANGED
@@ -15,14 +15,14 @@
     "<|video_pad|>"
   ],
   "eos_token": {
-    "content": "<|im_end|>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "pad_token": {
-    "content": "<|PAD_TOKEN|>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a
-size 11422086
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json CHANGED
@@ -177,14 +177,6 @@
       "rstrip": false,
       "single_word": false,
       "special": false
-    },
-    "151665": {
-      "content": "<|PAD_TOKEN|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
@@ -204,12 +196,11 @@
   ],
   "bos_token": null,
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|im_end|>",
+  "eos_token": "<|endoftext|>",
  "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 32768,
-  "pad_token": "<|PAD_TOKEN|>",
-  "padding_side": "right",
+  "pad_token": "<|endoftext|>",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null