hhoh committed on
Commit 74fc0f7 · verified · 1 Parent(s): 95f040f

Upload 7 files

chat_template.jinja ADDED
@@ -0,0 +1 @@
+ {% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}<|hy_begin▁of▁sentence|>{{ system_message }}<|hy_place▁holder▁no▁3|>{% else %}{% set loop_messages = messages %}<|hy_begin▁of▁sentence|>{% endif %}{% for message in loop_messages %}{% if message['role'] == 'user' %}<|hy_User|>{{ message['content'] }}{% elif message['role'] == 'assistant' %}<|hy_Assistant|>{{ message['content'] }}<|hy_place▁holder▁no▁2|>{% endif %}{% endfor %}{% if add_generation_prompt %}<|hy_Assistant|>{% else %}<|hy_place▁holder▁no▁8|>{% endif %}
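
For reference, a minimal sketch of how this template renders through transformers once the tokenizer in this commit is loaded ("org/model" below is a placeholder repo id, not this repo's actual path):

# Sketch: render the chat template above via transformers.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/model")  # placeholder repo id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# add_generation_prompt=True makes the rendered prompt end with <|hy_Assistant|>,
# cueing the model to produce the next assistant turn.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Expected shape:
# <|hy_begin▁of▁sentence|>You are a helpful assistant.<|hy_place▁holder▁no▁3|><|hy_User|>Hello!<|hy_Assistant|>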
config.json ADDED
@@ -0,0 +1,60 @@
+ {
+   "add_classification_head": false,
+   "architectures": [
+     "HunYuanDenseV1ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attention_head_dim": 128,
+   "bos_token_id": 120000,
+   "cla_share_factor": 2,
+   "class_num": 0,
+   "dense_list": [
+     2048,
+     0
+   ],
+   "dtype": "bfloat16",
+   "eos_token_id": 120020,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "im_end_id": 5,
+   "im_newline_id": 11,
+   "im_start_id": 4,
+   "initializer_range": 0.02,
+   "intermediate_size": 6144,
+   "mask_init_id": 12,
+   "max_position_embeddings": 262144,
+   "mlp_bias": false,
+   "model_type": "hunyuan_v1_dense",
+   "norm_type": "rms",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 4,
+   "org_vocab_size": 120818,
+   "pad_id": 120002,
+   "pad_token_id": 120002,
+   "pool_type": "last",
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "alpha": 1000.0,
+     "beta_fast": 32,
+     "beta_slow": 1,
+     "factor": 1.0,
+     "mscale": 1.0,
+     "mscale_all_dim": 1.0,
+     "type": "dynamic"
+   },
+   "rope_theta": 10000.0,
+   "sep_token_id": 120007,
+   "text_end_id": 7,
+   "text_start_id": 6,
+   "tie_word_embeddings": true,
+   "transformers_version": "4.56.1",
+   "use_cache": true,
+   "use_cla": false,
+   "use_qk_norm": true,
+   "use_rotary_pos_emb": true,
+   "vocab_size": 120818
+ }
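
A quick sanity check on the attention geometry this config implies: 16 query heads share 4 key/value heads (4-way grouped-query attention) at head_dim 128, so the query projection maps 2048 -> 2048 while each key/value projection maps 2048 -> 512. A small sketch, using only values copied from the config above:

# Sketch: derive the GQA layout from config.json values.
cfg = {
    "hidden_size": 2048,
    "num_attention_heads": 16,
    "num_key_value_heads": 4,
    "head_dim": 128,
}

q_dim = cfg["num_attention_heads"] * cfg["head_dim"]               # 16 * 128 = 2048
kv_dim = cfg["num_key_value_heads"] * cfg["head_dim"]              # 4 * 128 = 512
group = cfg["num_attention_heads"] // cfg["num_key_value_heads"]   # 4 query heads per KV head

print(f"q_proj: {cfg['hidden_size']} -> {q_dim}")
print(f"k/v_proj: {cfg['hidden_size']} -> {kv_dim} each ({group}-way GQA)")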
generation_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+   "bos_token_id": 120000,
+   "do_sample": true,
+   "eos_token_id": 120020,
+   "pad_token_id": 120002,
+   "repetition_penalty": 1.05,
+   "temperature": 0.7,
+   "top_k": 20,
+   "top_p": 0.8,
+   "transformers_version": "4.56.1"
+ }
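
These defaults (sampling enabled, temperature 0.7, top_p 0.8, top_k 20, repetition_penalty 1.05) are picked up automatically when the model is loaded from the repo. A hedged sketch, again with a placeholder repo id:

# Sketch: generation_config.json is loaded by from_pretrained, so
# generate() uses the sampling defaults above unless overridden per call.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/model")  # placeholder repo id
model = AutoModelForCausalLM.from_pretrained("org/model", torch_dtype=torch.bfloat16)

inputs = tokenizer("Hello", return_tensors="pt")
out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0], skip_special_tokens=True))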
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07736f560253d8c991616060fb2d855420957c268fa7d32fa8593df2f83b21ab
+ size 4077072784
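
The entry above is a Git LFS pointer, not the weights themselves; the actual safetensors file is 4,077,072,784 bytes (~3.8 GiB). One way to check a downloaded copy against the pointer's hash (a sketch; the local path is a placeholder):

# Sketch: verify a downloaded model.safetensors against the LFS pointer.
import hashlib

EXPECTED = "07736f560253d8c991616060fb2d855420957c268fa7d32fa8593df2f83b21ab"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:  # placeholder local path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == EXPECTED, "checksum mismatch"
print("sha256 OK")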
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|hy_begin▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|hy_place▁holder▁no▁2|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|hy_▁pad▁|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
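
Note how this map lines up with the chat template and config above: the eos_token <|hy_place▁holder▁no▁2|> is the same token the template appends after each assistant turn, so generation stops cleanly at turn boundaries. A quick hedged check (placeholder repo id again):

# Sketch: confirm the special tokens resolve as mapped above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/model")  # placeholder repo id
print(tokenizer.bos_token)  # <|hy_begin▁of▁sentence|> (bos_token_id 120000 per config.json)
print(tokenizer.eos_token)  # <|hy_place▁holder▁no▁2|> (eos_token_id 120020 per config.json)
print(tokenizer.pad_token)  # <|hy_▁pad▁|>             (pad_token_id 120002 per config.json)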
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff