Add files using upload-large-folder tool
- config.json +2 -6
- generation_config.json +1 -1
- tokenizer_config.json +1 -0
config.json CHANGED
```diff
@@ -6,11 +6,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
+  "eos_token_id": 128009,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 3072,
@@ -55,7 +51,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.48.
+  "transformers_version": "4.48.1",
   "unsloth_fixed": true,
   "use_cache": true,
   "vocab_size": 128256
```
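The substantive change here collapses eos_token_id from the list [128001, 128008, 128009] to the single id 128009 (commonly <|eot_id|> in Llama 3 family tokenizers), so generation stops only on the end-of-turn token. A minimal sketch for checking the loaded values; the repo id below is a placeholder assumption, not the actual repository this commit belongs to:

```python
# Minimal sketch: verify the edited config fields after this commit.
# NOTE: "your-org/your-model" is a placeholder repo id (assumption),
# not the actual repository behind this diff.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-org/your-model")

# eos_token_id is now the single id 128009 rather than the list
# [128001, 128008, 128009], so generate() stops only on that token.
print(config.eos_token_id)  # expected: 128009
print(config.bos_token_id)  # 128000
print(config.vocab_size)    # 128256
```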
generation_config.json CHANGED
```diff
@@ -10,5 +10,5 @@
   "pad_token_id": 128004,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.48.
+  "transformers_version": "4.48.1"
 }
```
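These defaults (temperature 0.6, top_p 0.9, pad_token_id 128004) are what model.generate() picks up automatically from generation_config.json. A minimal sketch of reading them directly, again with a placeholder repo id:

```python
# Minimal sketch: generation_config.json values load as
# GenerationConfig defaults. Placeholder repo id, as above.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("your-org/your-model")
print(gen_cfg.temperature)   # 0.6
print(gen_cfg.top_p)         # 0.9
print(gen_cfg.pad_token_id)  # 128004
```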
tokenizer_config.json CHANGED
```diff
@@ -1,4 +1,5 @@
 {
+  "add_bos_token": true,
   "added_tokens_decoder": {
     "128000": {
       "content": "<|begin_of_text|>",
```
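With "add_bos_token": true, the tokenizer prepends <|begin_of_text|> (id 128000, per the added_tokens_decoder entry above) to every encoded sequence. A minimal sketch, with the same placeholder repo id:

```python
# Minimal sketch: "add_bos_token": true makes the tokenizer prepend
# <|begin_of_text|> (id 128000) to encoded input. Placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")
ids = tok("Hello").input_ids
print(ids[0])           # expected: 128000 (<|begin_of_text|>)
print(tok.decode(ids))  # "<|begin_of_text|>Hello"
```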