danielhanchen committed
Commit 11f0887 · verified · 1 Parent(s): 36abf88

Add files using upload-large-folder tool

Files changed (3):
  1. chat_template.jinja +58 -70
  2. config.json +759 -678
  3. tokenizer_config.json +1 -1
chat_template.jinja CHANGED
@@ -1,79 +1,51 @@
  {{- bos_token }}
- {%- if custom_tools is defined %}
  {%- set tools = custom_tools %}
  {%- endif %}
- {%- if not tools_in_user_message is defined %}
- {%- set tools_in_user_message = true %}
- {%- endif %}
- {%- if not date_string is defined %}
- {%- if strftime_now is defined %}
- {%- set date_string = strftime_now("%d %b %Y") %}
- {%- else %}
- {%- set date_string = "26 Jul 2024" %}
- {%- endif %}
- {%- endif %}
- {%- if not tools is defined %}
  {%- set tools = none %}
  {%- endif %}

  {#- This block extracts the system message, so we can slot it into the right place. #}
- {%- if messages[0]['role'] == 'system' %}
  {%- if messages[0]['content'] is string %}
  {%- set system_message = messages[0]['content']|trim %}
  {%- else %}
- {#- FIXME: The processor requires an array, always. #}
  {%- set system_message = messages[0]['content'][0]['text']|trim %}
  {%- endif %}
  {%- set messages = messages[1:] %}
- {%- set user_supplied_system_message = true %}
  {%- else %}
- {%- set system_message = "" %}
- {%- set user_supplied_system_message = false %}
  {%- endif %}
-
- {#- System message if the user supplied one #}
- {%- if user_supplied_system_message %}
  {{- "<|header_start|>system<|header_end|>\n\n" }}
- {%- if tools is not none %}
- {{- "Environment: ipython\n" }}
- {%- endif %}
- {%- if tools is not none and not tools_in_user_message %}
- {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
- {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
- {{- "Do not use variables.\n\n" }}
- {%- for t in tools %}
- {{- t | tojson(indent=4) }}
- {{- "\n\n" }}
- {%- endfor %}
- {%- endif %}
  {{- system_message }}
  {{- "<|eot|>" }}
  {%- endif %}

- {#- Custom tools are passed in a user message with some extra guidance #}
- {%- if tools_in_user_message and not tools is none %}
- {#- Extract the first user message so we can plug it in here #}
- {%- if messages | length != 0 %}
- {%- set first_user_message = messages[0]['content']|trim %}
- {%- set messages = messages[1:] %}
- {%- else %}
- {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
- {%- endif %}
- {{- '<|header_start|>user<|header_end|>\n\n' -}}
- {{- "Given the following functions, please respond with a JSON for a function call " }}
- {{- "with its proper arguments that best answers the given prompt.\n\n" }}
- {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
- {{- "Do not use variables.\n\n" }}
- {%- for t in tools %}
- {{- t | tojson(indent=4) }}
- {{- "\n\n" }}
- {%- endfor %}
- {{- first_user_message + "<|eot|>"}}
- {%- endif %}
-
  {%- for message in messages %}
- {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
- {{- '<|header_start|>' + message['role'] + '<|header_end|>\n\n' }}
  {%- if message['content'] is string %}
  {{- message['content'] }}
  {%- else %}
@@ -81,14 +53,16 @@
  {%- if content['type'] == 'image' %}
  {{- '<|image|>' }}
  {%- elif content['type'] == 'text' %}
- {{- content['text'] }}
  {%- endif %}
  {%- endfor %}
  {%- endif %}
- {{- "<|eot|>" }}
- {%- elif 'tool_calls' in message and message.tool_calls|length > 0 %}
- {{- '<|header_start|>assistant<|header_end|>\n\n' -}}
- {{- '<|python_start|>' }}
  {%- if message['content'] is string %}
  {{- message['content'] }}
  {%- else %}
@@ -100,24 +74,38 @@
  {%- endif %}
  {%- endfor %}
  {%- endif %}
- {{- '<|python_end|>' }}
  {%- for tool_call in message.tool_calls %}
- {{- '{"name": "' + tool_call.function.name + '", ' }}
- {{- '"parameters": ' }}
- {{- tool_call.function.arguments | tojson }}
- {{- "}" }}
  {%- endfor %}
- {{- "<|eot|>" }}
  {%- elif message.role == "tool" or message.role == "ipython" %}
  {{- "<|header_start|>ipython<|header_end|>\n\n" }}
- {%- if message.content is mapping or message.content is iterable %}
- {{- message.content | tojson }}
  {%- else %}
- {{- message.content }}
  {%- endif %}
  {{- "<|eot|>" }}
  {%- endif %}
  {%- endfor %}
  {%- if add_generation_prompt %}
  {{- '<|header_start|>assistant<|header_end|>\n\n' }}
- {%- endif %}
 
  {{- bos_token }}
+ {%- if custom_tools is defined and custom_tools %}
  {%- set tools = custom_tools %}
  {%- endif %}
+ {%- if tools is defined and tools %}
+ {%- set tool_definition = tool_definition ~ (tools | tojson(indent=4)) %}
+ {%- else %}
  {%- set tools = none %}
  {%- endif %}

+
  {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+ {%- set user_provided_system_message = true %}
  {%- if messages[0]['content'] is string %}
  {%- set system_message = messages[0]['content']|trim %}
  {%- else %}
  {%- set system_message = messages[0]['content'][0]['text']|trim %}
  {%- endif %}
  {%- set messages = messages[1:] %}
  {%- else %}
+ {%- if tools is not none %}
+ {#- Since no system_message was provided by the user, if tools are provided, system_message is now the default tool system message #}
+ {#- This system message is from the llama website: https://www.llama.com/docs/model-cards-and-prompt-formats/llama4/ #}
+ {%- set system_message = "You are a helpful assistant and an expert in function composition. You can answer general questions using your internal knowledge OR invoke functions when necessary. Follow these strict guidelines:\n\n1. FUNCTION CALLS:\n- ONLY use functions that are EXPLICITLY listed in the function list below\n- If NO functions are listed (empty function list []), respond ONLY with internal knowledge or \"I don't have access to [Unavailable service] information\"\n- If a function is not in the list, respond ONLY with internal knowledge or \"I don't have access to [Unavailable service] information\"\n- If ALL required parameters are present AND the query EXACTLY matches a listed function's purpose: output ONLY the function call(s)\n- Use exact format: [func_name1(param1=value1, param2=value2), func_name2(...)]\nExamples:\nCORRECT: [get_weather(location=\"Vancouver\"), calculate_route(start=\"Boston\", end=\"New York\")] <- Only if get_weather and calculate_route are in function list\nINCORRECT: get_weather(location=\"New York\")\nINCORRECT: Let me check the weather: [get_weather(location=\"New York\")]\nINCORRECT: [get_events(location=\"Singapore\")] <- If function not in list\n\n2. RESPONSE RULES:\n- For pure function requests matching a listed function: ONLY output the function call(s)\n- For knowledge questions: ONLY output text\n- For missing parameters: ONLY request the specific missing parameters\n- For unavailable services (not in function list): output ONLY with internal knowledge or \"I don't have access to [Unavailable service] information\". Do NOT execute a function call.\n- If the query asks for information beyond what a listed function provides: output ONLY with internal knowledge about your limitations\n- NEVER combine text and function calls in the same response\n- NEVER suggest alternative functions when the requested service is unavailable\n- NEVER create or invent new functions not listed below\n\n3. STRICT BOUNDARIES:\n- ONLY use functions from the list below - no exceptions\n- NEVER use a function as an alternative to unavailable information\n- NEVER call functions not present in the function list\n- NEVER add explanatory text to function calls\n- NEVER respond with empty brackets\n- Use proper Python/JSON syntax for function calls\n- Check the function list carefully before responding\n\n4. TOOL RESPONSE HANDLING:\n- When receiving tool responses: provide concise, natural language responses\n- Don't repeat tool response verbatim\n- Don't add supplementary information\n\nHere is a list of functions in JSON format that you can invoke:\n" %}
+ {%- else %}
  {%- set system_message = "" %}
+ {%- endif %}
  {%- endif %}
+ {#- Now write the system message: use the user-provided system message if one was given, else the default tool system message if tools are present #}
+ {%- if system_message %}
+ {#- Always use the user-provided system message to override the default tool system message #}
  {{- "<|header_start|>system<|header_end|>\n\n" }}
  {{- system_message }}
+ {%- if user_provided_system_message and tools %}
+ {{- "\nHere is a list of functions in JSON format that you can invoke. Use exact format: [func_name1(param1=value1, param2=value2), func_name2(...)]\n" }}
+ {{- tool_definition -}}
+ {%- elif tool_definition %}
+ {{- tool_definition -}}
+ {%- endif %}
  {{- "<|eot|>" }}
  {%- endif %}

+ {#- Now deal with all other messages #}
  {%- for message in messages %}
+ {#- Base case: messages that are not from the tool role and have an empty tool_calls list #}
+ {%- if not (message.role == 'ipython' or message.role == 'tool' or ('tool_calls' in message and message.tool_calls|length != 0)) %}
+ {{- '<|header_start|>' + message['role'] + '<|header_end|>\n\n' }}
  {%- if message['content'] is string %}
  {{- message['content'] }}
  {%- else %}
@@ -81,14 +53,16 @@
  {%- if content['type'] == 'image' %}
  {{- '<|image|>' }}
  {%- elif content['type'] == 'text' %}
+ {{- content['text'] | trim }}
  {%- endif %}
  {%- endfor %}
  {%- endif %}
+ {{- "<|eot|>" }}
+ {#- Tool case: message has a non-empty tool_calls list and must come from the assistant #}
+ {%- elif 'tool_calls' in message %}
+ {#- Assume tool_calls always come from the assistant #}
+ {%- if message.role == 'assistant' %}
+ {{- '<|header_start|>assistant<|header_end|>\n\n' -}}
  {%- if message['content'] is string %}
  {{- message['content'] }}
  {%- else %}
@@ -100,24 +74,38 @@
  {%- endif %}
  {%- endfor %}
  {%- endif %}
+ {{- "[" }}
  {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- tool_call.name + '(' -}}
+ {%- for param in tool_call.arguments %}
+ {{- param + '="' -}}
+ {{- "%s" | format(tool_call.arguments[param]) -}}
+ {{- '"' -}}
+ {% if not loop.last %}, {% endif %}
+ {%- endfor %}
+ {{- ')' -}}
+ {% if not loop.last %}, {% endif %}
  {%- endfor %}
+ {{- "]<|eot|>" }}
+ {%- endif %}
+ {#- Tool response case: message is a tool response #}
  {%- elif message.role == "tool" or message.role == "ipython" %}
  {{- "<|header_start|>ipython<|header_end|>\n\n" }}
+ {%- if message.content is string %}
+ {{- message.content | tojson }}
  {%- else %}
+ {%- for content in message['content'] %}
+ {%- if content['type'] == 'text' %}
+ {{- content['text'] | tojson }}
+ {%- endif %}
+ {%- endfor %}
  {%- endif %}
  {{- "<|eot|>" }}
  {%- endif %}
  {%- endfor %}
  {%- if add_generation_prompt %}
  {{- '<|header_start|>assistant<|header_end|>\n\n' }}
+ {%- endif %}
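
For reference, a minimal sketch of how the updated template renders an assistant tool call, assuming the Hugging Face transformers apply_chat_template API; the checkpoint id, tool schema, and messages below are hypothetical illustrations, not part of this commit:

    # Minimal sketch -- substitute the actual checkpoint this commit belongs to.
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("unsloth/Llama-4-Scout-17B-16E-Instruct")  # hypothetical repo id

    tools = [{
        "name": "get_weather",  # hypothetical tool, mirroring the template's own example
        "description": "Get the current weather for a location.",
        "parameters": {"type": "object", "properties": {"location": {"type": "string"}}},
    }]

    messages = [
        {"role": "user", "content": "What is the weather in Vancouver?"},
        {"role": "assistant",
         "tool_calls": [{"function": {"name": "get_weather",
                                      "arguments": {"location": "Vancouver"}}}]},
    ]

    prompt = tokenizer.apply_chat_template(messages, tools=tools, tokenize=False)
    print(prompt)
    # With the new template, the assistant turn should render in the bracketed
    # Python-call style, e.g. [get_weather(location="Vancouver")]<|eot|>, instead
    # of the old <|python_start|>{"name": ..., "parameters": ...}<|python_end|> JSON form.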
config.json CHANGED
@@ -1,684 +1,765 @@
  {
- "architectures": [
- "Llama4ForConditionalGeneration"
- ],
- "boi_token_index": 200080,
- "eoi_token_index": 200081,
- "image_token_index": 200092,
- "model_type": "llama4",
- "quantization_config": {
- "config_groups": {
- "group_0": {
- "input_activations": {
- "actorder": null,
- "block_structure": null,
- "dynamic": true,
- "group_size": null,
- "num_bits": 8,
- "observer": null,
- "observer_kwargs": {},
- "strategy": "token",
- "symmetric": true,
- "type": "float"
- },
- "output_activations": null,
- "targets": [
- "Linear"
- ],
- "weights": {
- "actorder": null,
- "block_structure": null,
- "dynamic": false,
- "group_size": null,
- "num_bits": 8,
- "observer": "minmax",
- "observer_kwargs": {},
- "strategy": "channel",
- "symmetric": true,
- "type": "float"
- }
- }
  },
- "format": "float-quantized",
- "global_compression_ratio": null,
- "ignore": [
- "vision_model.patch_embedding.linear",
- "vision_model.model.layers.0.self_attn.q_proj",
- "vision_model.model.layers.0.self_attn.k_proj",
- "vision_model.model.layers.0.self_attn.v_proj",
- "vision_model.model.layers.0.self_attn.o_proj",
- "vision_model.model.layers.0.mlp.fc1",
- "vision_model.model.layers.0.mlp.fc2",
- "vision_model.model.layers.1.self_attn.q_proj",
- "vision_model.model.layers.1.self_attn.k_proj",
- "vision_model.model.layers.1.self_attn.v_proj",
- "vision_model.model.layers.1.self_attn.o_proj",
- "vision_model.model.layers.1.mlp.fc1",
- "vision_model.model.layers.1.mlp.fc2",
- "vision_model.model.layers.2.self_attn.q_proj",
- "vision_model.model.layers.2.self_attn.k_proj",
- "vision_model.model.layers.2.self_attn.v_proj",
- "vision_model.model.layers.2.self_attn.o_proj",
- "vision_model.model.layers.2.mlp.fc1",
- "vision_model.model.layers.2.mlp.fc2",
- "vision_model.model.layers.3.self_attn.q_proj",
- "vision_model.model.layers.3.self_attn.k_proj",
- "vision_model.model.layers.3.self_attn.v_proj",
- "vision_model.model.layers.3.self_attn.o_proj",
- "vision_model.model.layers.3.mlp.fc1",
- "vision_model.model.layers.3.mlp.fc2",
- "vision_model.model.layers.4.self_attn.q_proj",
- "vision_model.model.layers.4.self_attn.k_proj",
- "vision_model.model.layers.4.self_attn.v_proj",
- "vision_model.model.layers.4.self_attn.o_proj",
- "vision_model.model.layers.4.mlp.fc1",
- "vision_model.model.layers.4.mlp.fc2",
- "vision_model.model.layers.5.self_attn.q_proj",
- "vision_model.model.layers.5.self_attn.k_proj",
- "vision_model.model.layers.5.self_attn.v_proj",
- "vision_model.model.layers.5.self_attn.o_proj",
- "vision_model.model.layers.5.mlp.fc1",
- "vision_model.model.layers.5.mlp.fc2",
- "vision_model.model.layers.6.self_attn.q_proj",
- "vision_model.model.layers.6.self_attn.k_proj",
- "vision_model.model.layers.6.self_attn.v_proj",
- "vision_model.model.layers.6.self_attn.o_proj",
- "vision_model.model.layers.6.mlp.fc1",
- "vision_model.model.layers.6.mlp.fc2",
- "vision_model.model.layers.7.self_attn.q_proj",
- "vision_model.model.layers.7.self_attn.k_proj",
- "vision_model.model.layers.7.self_attn.v_proj",
- "vision_model.model.layers.7.self_attn.o_proj",
- "vision_model.model.layers.7.mlp.fc1",
- "vision_model.model.layers.7.mlp.fc2",
- "vision_model.model.layers.8.self_attn.q_proj",
- "vision_model.model.layers.8.self_attn.k_proj",
- "vision_model.model.layers.8.self_attn.v_proj",
- "vision_model.model.layers.8.self_attn.o_proj",
- "vision_model.model.layers.8.mlp.fc1",
- "vision_model.model.layers.8.mlp.fc2",
- "vision_model.model.layers.9.self_attn.q_proj",
- "vision_model.model.layers.9.self_attn.k_proj",
- "vision_model.model.layers.9.self_attn.v_proj",
- "vision_model.model.layers.9.self_attn.o_proj",
- "vision_model.model.layers.9.mlp.fc1",
- "vision_model.model.layers.9.mlp.fc2",
- "vision_model.model.layers.10.self_attn.q_proj",
- "vision_model.model.layers.10.self_attn.k_proj",
- "vision_model.model.layers.10.self_attn.v_proj",
- "vision_model.model.layers.10.self_attn.o_proj",
- "vision_model.model.layers.10.mlp.fc1",
- "vision_model.model.layers.10.mlp.fc2",
- "vision_model.model.layers.11.self_attn.q_proj",
- "vision_model.model.layers.11.self_attn.k_proj",
- "vision_model.model.layers.11.self_attn.v_proj",
- "vision_model.model.layers.11.self_attn.o_proj",
- "vision_model.model.layers.11.mlp.fc1",
- "vision_model.model.layers.11.mlp.fc2",
- "vision_model.model.layers.12.self_attn.q_proj",
- "vision_model.model.layers.12.self_attn.k_proj",
- "vision_model.model.layers.12.self_attn.v_proj",
- "vision_model.model.layers.12.self_attn.o_proj",
- "vision_model.model.layers.12.mlp.fc1",
- "vision_model.model.layers.12.mlp.fc2",
- "vision_model.model.layers.13.self_attn.q_proj",
- "vision_model.model.layers.13.self_attn.k_proj",
- "vision_model.model.layers.13.self_attn.v_proj",
- "vision_model.model.layers.13.self_attn.o_proj",
- "vision_model.model.layers.13.mlp.fc1",
- "vision_model.model.layers.13.mlp.fc2",
- "vision_model.model.layers.14.self_attn.q_proj",
- "vision_model.model.layers.14.self_attn.k_proj",
- "vision_model.model.layers.14.self_attn.v_proj",
- "vision_model.model.layers.14.self_attn.o_proj",
- "vision_model.model.layers.14.mlp.fc1",
- "vision_model.model.layers.14.mlp.fc2",
- "vision_model.model.layers.15.self_attn.q_proj",
- "vision_model.model.layers.15.self_attn.k_proj",
- "vision_model.model.layers.15.self_attn.v_proj",
- "vision_model.model.layers.15.self_attn.o_proj",
- "vision_model.model.layers.15.mlp.fc1",
- "vision_model.model.layers.15.mlp.fc2",
- "vision_model.model.layers.16.self_attn.q_proj",
- "vision_model.model.layers.16.self_attn.k_proj",
- "vision_model.model.layers.16.self_attn.v_proj",
- "vision_model.model.layers.16.self_attn.o_proj",
- "vision_model.model.layers.16.mlp.fc1",
- "vision_model.model.layers.16.mlp.fc2",
- "vision_model.model.layers.17.self_attn.q_proj",
- "vision_model.model.layers.17.self_attn.k_proj",
- "vision_model.model.layers.17.self_attn.v_proj",
- "vision_model.model.layers.17.self_attn.o_proj",
- "vision_model.model.layers.17.mlp.fc1",
- "vision_model.model.layers.17.mlp.fc2",
- "vision_model.model.layers.18.self_attn.q_proj",
- "vision_model.model.layers.18.self_attn.k_proj",
- "vision_model.model.layers.18.self_attn.v_proj",
- "vision_model.model.layers.18.self_attn.o_proj",
- "vision_model.model.layers.18.mlp.fc1",
- "vision_model.model.layers.18.mlp.fc2",
- "vision_model.model.layers.19.self_attn.q_proj",
- "vision_model.model.layers.19.self_attn.k_proj",
- "vision_model.model.layers.19.self_attn.v_proj",
- "vision_model.model.layers.19.self_attn.o_proj",
- "vision_model.model.layers.19.mlp.fc1",
- "vision_model.model.layers.19.mlp.fc2",
- "vision_model.model.layers.20.self_attn.q_proj",
- "vision_model.model.layers.20.self_attn.k_proj",
- "vision_model.model.layers.20.self_attn.v_proj",
- "vision_model.model.layers.20.self_attn.o_proj",
- "vision_model.model.layers.20.mlp.fc1",
- "vision_model.model.layers.20.mlp.fc2",
- "vision_model.model.layers.21.self_attn.q_proj",
- "vision_model.model.layers.21.self_attn.k_proj",
- "vision_model.model.layers.21.self_attn.v_proj",
- "vision_model.model.layers.21.self_attn.o_proj",
- "vision_model.model.layers.21.mlp.fc1",
- "vision_model.model.layers.21.mlp.fc2",
- "vision_model.model.layers.22.self_attn.q_proj",
- "vision_model.model.layers.22.self_attn.k_proj",
- "vision_model.model.layers.22.self_attn.v_proj",
- "vision_model.model.layers.22.self_attn.o_proj",
- "vision_model.model.layers.22.mlp.fc1",
- "vision_model.model.layers.22.mlp.fc2",
- "vision_model.model.layers.23.self_attn.q_proj",
- "vision_model.model.layers.23.self_attn.k_proj",
- "vision_model.model.layers.23.self_attn.v_proj",
- "vision_model.model.layers.23.self_attn.o_proj",
- "vision_model.model.layers.23.mlp.fc1",
- "vision_model.model.layers.23.mlp.fc2",
- "vision_model.model.layers.24.self_attn.q_proj",
- "vision_model.model.layers.24.self_attn.k_proj",
- "vision_model.model.layers.24.self_attn.v_proj",
- "vision_model.model.layers.24.self_attn.o_proj",
- "vision_model.model.layers.24.mlp.fc1",
- "vision_model.model.layers.24.mlp.fc2",
- "vision_model.model.layers.25.self_attn.q_proj",
- "vision_model.model.layers.25.self_attn.k_proj",
- "vision_model.model.layers.25.self_attn.v_proj",
- "vision_model.model.layers.25.self_attn.o_proj",
- "vision_model.model.layers.25.mlp.fc1",
- "vision_model.model.layers.25.mlp.fc2",
- "vision_model.model.layers.26.self_attn.q_proj",
- "vision_model.model.layers.26.self_attn.k_proj",
- "vision_model.model.layers.26.self_attn.v_proj",
- "vision_model.model.layers.26.self_attn.o_proj",
- "vision_model.model.layers.26.mlp.fc1",
- "vision_model.model.layers.26.mlp.fc2",
- "vision_model.model.layers.27.self_attn.q_proj",
- "vision_model.model.layers.27.self_attn.k_proj",
- "vision_model.model.layers.27.self_attn.v_proj",
- "vision_model.model.layers.27.self_attn.o_proj",
- "vision_model.model.layers.27.mlp.fc1",
- "vision_model.model.layers.27.mlp.fc2",
- "vision_model.model.layers.28.self_attn.q_proj",
- "vision_model.model.layers.28.self_attn.k_proj",
- "vision_model.model.layers.28.self_attn.v_proj",
- "vision_model.model.layers.28.self_attn.o_proj",
- "vision_model.model.layers.28.mlp.fc1",
- "vision_model.model.layers.28.mlp.fc2",
- "vision_model.model.layers.29.self_attn.q_proj",
- "vision_model.model.layers.29.self_attn.k_proj",
- "vision_model.model.layers.29.self_attn.v_proj",
- "vision_model.model.layers.29.self_attn.o_proj",
- "vision_model.model.layers.29.mlp.fc1",
- "vision_model.model.layers.29.mlp.fc2",
- "vision_model.model.layers.30.self_attn.q_proj",
- "vision_model.model.layers.30.self_attn.k_proj",
- "vision_model.model.layers.30.self_attn.v_proj",
- "vision_model.model.layers.30.self_attn.o_proj",
- "vision_model.model.layers.30.mlp.fc1",
- "vision_model.model.layers.30.mlp.fc2",
- "vision_model.model.layers.31.self_attn.q_proj",
- "vision_model.model.layers.31.self_attn.k_proj",
- "vision_model.model.layers.31.self_attn.v_proj",
- "vision_model.model.layers.31.self_attn.o_proj",
- "vision_model.model.layers.31.mlp.fc1",
- "vision_model.model.layers.31.mlp.fc2",
- "vision_model.model.layers.32.self_attn.q_proj",
- "vision_model.model.layers.32.self_attn.k_proj",
- "vision_model.model.layers.32.self_attn.v_proj",
- "vision_model.model.layers.32.self_attn.o_proj",
- "vision_model.model.layers.32.mlp.fc1",
- "vision_model.model.layers.32.mlp.fc2",
- "vision_model.model.layers.33.self_attn.q_proj",
- "vision_model.model.layers.33.self_attn.k_proj",
- "vision_model.model.layers.33.self_attn.v_proj",
- "vision_model.model.layers.33.self_attn.o_proj",
- "vision_model.model.layers.33.mlp.fc1",
- "vision_model.model.layers.33.mlp.fc2",
- "vision_model.vision_adapter.mlp.fc1",
- "vision_model.vision_adapter.mlp.fc2",
- "multi_modal_projector.linear_1",
- "language_model.model.layers.0.self_attn.q_proj",
- "language_model.model.layers.0.self_attn.k_proj",
- "language_model.model.layers.0.self_attn.v_proj",
- "language_model.model.layers.0.self_attn.o_proj",
- "language_model.model.layers.0.feed_forward.gate_proj",
- "language_model.model.layers.0.feed_forward.up_proj",
- "language_model.model.layers.0.feed_forward.down_proj",
- "language_model.model.layers.1.self_attn.q_proj",
- "language_model.model.layers.1.self_attn.k_proj",
- "language_model.model.layers.1.self_attn.v_proj",
- "language_model.model.layers.1.self_attn.o_proj",
- "language_model.model.layers.1.feed_forward.router",
- "language_model.model.layers.1.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.1.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.1.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.2.self_attn.q_proj",
- "language_model.model.layers.2.self_attn.k_proj",
- "language_model.model.layers.2.self_attn.v_proj",
- "language_model.model.layers.2.self_attn.o_proj",
- "language_model.model.layers.2.feed_forward.gate_proj",
- "language_model.model.layers.2.feed_forward.up_proj",
- "language_model.model.layers.2.feed_forward.down_proj",
- "language_model.model.layers.3.self_attn.q_proj",
- "language_model.model.layers.3.self_attn.k_proj",
- "language_model.model.layers.3.self_attn.v_proj",
- "language_model.model.layers.3.self_attn.o_proj",
- "language_model.model.layers.3.feed_forward.router",
- "language_model.model.layers.3.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.3.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.3.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.4.self_attn.q_proj",
- "language_model.model.layers.4.self_attn.k_proj",
- "language_model.model.layers.4.self_attn.v_proj",
- "language_model.model.layers.4.self_attn.o_proj",
- "language_model.model.layers.4.feed_forward.gate_proj",
- "language_model.model.layers.4.feed_forward.up_proj",
- "language_model.model.layers.4.feed_forward.down_proj",
- "language_model.model.layers.5.self_attn.q_proj",
- "language_model.model.layers.5.self_attn.k_proj",
- "language_model.model.layers.5.self_attn.v_proj",
- "language_model.model.layers.5.self_attn.o_proj",
- "language_model.model.layers.5.feed_forward.router",
- "language_model.model.layers.5.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.5.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.5.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.6.self_attn.q_proj",
- "language_model.model.layers.6.self_attn.k_proj",
- "language_model.model.layers.6.self_attn.v_proj",
- "language_model.model.layers.6.self_attn.o_proj",
- "language_model.model.layers.6.feed_forward.gate_proj",
- "language_model.model.layers.6.feed_forward.up_proj",
- "language_model.model.layers.6.feed_forward.down_proj",
- "language_model.model.layers.7.self_attn.q_proj",
- "language_model.model.layers.7.self_attn.k_proj",
- "language_model.model.layers.7.self_attn.v_proj",
- "language_model.model.layers.7.self_attn.o_proj",
- "language_model.model.layers.7.feed_forward.router",
- "language_model.model.layers.7.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.7.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.7.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.8.self_attn.q_proj",
- "language_model.model.layers.8.self_attn.k_proj",
- "language_model.model.layers.8.self_attn.v_proj",
- "language_model.model.layers.8.self_attn.o_proj",
- "language_model.model.layers.8.feed_forward.gate_proj",
- "language_model.model.layers.8.feed_forward.up_proj",
- "language_model.model.layers.8.feed_forward.down_proj",
- "language_model.model.layers.9.self_attn.q_proj",
- "language_model.model.layers.9.self_attn.k_proj",
- "language_model.model.layers.9.self_attn.v_proj",
- "language_model.model.layers.9.self_attn.o_proj",
- "language_model.model.layers.9.feed_forward.router",
- "language_model.model.layers.9.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.9.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.9.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.10.self_attn.q_proj",
- "language_model.model.layers.10.self_attn.k_proj",
- "language_model.model.layers.10.self_attn.v_proj",
- "language_model.model.layers.10.self_attn.o_proj",
- "language_model.model.layers.10.feed_forward.gate_proj",
- "language_model.model.layers.10.feed_forward.up_proj",
- "language_model.model.layers.10.feed_forward.down_proj",
- "language_model.model.layers.11.self_attn.q_proj",
- "language_model.model.layers.11.self_attn.k_proj",
- "language_model.model.layers.11.self_attn.v_proj",
- "language_model.model.layers.11.self_attn.o_proj",
- "language_model.model.layers.11.feed_forward.router",
- "language_model.model.layers.11.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.11.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.11.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.12.self_attn.q_proj",
- "language_model.model.layers.12.self_attn.k_proj",
- "language_model.model.layers.12.self_attn.v_proj",
- "language_model.model.layers.12.self_attn.o_proj",
- "language_model.model.layers.12.feed_forward.gate_proj",
- "language_model.model.layers.12.feed_forward.up_proj",
- "language_model.model.layers.12.feed_forward.down_proj",
- "language_model.model.layers.13.self_attn.q_proj",
- "language_model.model.layers.13.self_attn.k_proj",
- "language_model.model.layers.13.self_attn.v_proj",
- "language_model.model.layers.13.self_attn.o_proj",
- "language_model.model.layers.13.feed_forward.router",
- "language_model.model.layers.13.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.13.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.13.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.14.self_attn.q_proj",
- "language_model.model.layers.14.self_attn.k_proj",
- "language_model.model.layers.14.self_attn.v_proj",
- "language_model.model.layers.14.self_attn.o_proj",
- "language_model.model.layers.14.feed_forward.gate_proj",
- "language_model.model.layers.14.feed_forward.up_proj",
- "language_model.model.layers.14.feed_forward.down_proj",
- "language_model.model.layers.15.self_attn.q_proj",
- "language_model.model.layers.15.self_attn.k_proj",
- "language_model.model.layers.15.self_attn.v_proj",
- "language_model.model.layers.15.self_attn.o_proj",
- "language_model.model.layers.15.feed_forward.router",
- "language_model.model.layers.15.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.15.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.15.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.16.self_attn.q_proj",
- "language_model.model.layers.16.self_attn.k_proj",
- "language_model.model.layers.16.self_attn.v_proj",
- "language_model.model.layers.16.self_attn.o_proj",
- "language_model.model.layers.16.feed_forward.gate_proj",
- "language_model.model.layers.16.feed_forward.up_proj",
- "language_model.model.layers.16.feed_forward.down_proj",
- "language_model.model.layers.17.self_attn.q_proj",
- "language_model.model.layers.17.self_attn.k_proj",
- "language_model.model.layers.17.self_attn.v_proj",
- "language_model.model.layers.17.self_attn.o_proj",
- "language_model.model.layers.17.feed_forward.router",
- "language_model.model.layers.17.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.17.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.17.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.18.self_attn.q_proj",
- "language_model.model.layers.18.self_attn.k_proj",
- "language_model.model.layers.18.self_attn.v_proj",
- "language_model.model.layers.18.self_attn.o_proj",
- "language_model.model.layers.18.feed_forward.gate_proj",
- "language_model.model.layers.18.feed_forward.up_proj",
- "language_model.model.layers.18.feed_forward.down_proj",
- "language_model.model.layers.19.self_attn.q_proj",
- "language_model.model.layers.19.self_attn.k_proj",
- "language_model.model.layers.19.self_attn.v_proj",
- "language_model.model.layers.19.self_attn.o_proj",
- "language_model.model.layers.19.feed_forward.router",
- "language_model.model.layers.19.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.19.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.19.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.20.self_attn.q_proj",
- "language_model.model.layers.20.self_attn.k_proj",
- "language_model.model.layers.20.self_attn.v_proj",
- "language_model.model.layers.20.self_attn.o_proj",
- "language_model.model.layers.20.feed_forward.gate_proj",
- "language_model.model.layers.20.feed_forward.up_proj",
- "language_model.model.layers.20.feed_forward.down_proj",
- "language_model.model.layers.21.self_attn.q_proj",
- "language_model.model.layers.21.self_attn.k_proj",
- "language_model.model.layers.21.self_attn.v_proj",
- "language_model.model.layers.21.self_attn.o_proj",
- "language_model.model.layers.21.feed_forward.router",
- "language_model.model.layers.21.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.21.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.21.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.22.self_attn.q_proj",
- "language_model.model.layers.22.self_attn.k_proj",
- "language_model.model.layers.22.self_attn.v_proj",
- "language_model.model.layers.22.self_attn.o_proj",
- "language_model.model.layers.22.feed_forward.gate_proj",
- "language_model.model.layers.22.feed_forward.up_proj",
- "language_model.model.layers.22.feed_forward.down_proj",
- "language_model.model.layers.23.self_attn.q_proj",
- "language_model.model.layers.23.self_attn.k_proj",
- "language_model.model.layers.23.self_attn.v_proj",
- "language_model.model.layers.23.self_attn.o_proj",
- "language_model.model.layers.23.feed_forward.router",
- "language_model.model.layers.23.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.23.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.23.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.24.self_attn.q_proj",
- "language_model.model.layers.24.self_attn.k_proj",
- "language_model.model.layers.24.self_attn.v_proj",
- "language_model.model.layers.24.self_attn.o_proj",
- "language_model.model.layers.24.feed_forward.gate_proj",
- "language_model.model.layers.24.feed_forward.up_proj",
- "language_model.model.layers.24.feed_forward.down_proj",
- "language_model.model.layers.25.self_attn.q_proj",
- "language_model.model.layers.25.self_attn.k_proj",
- "language_model.model.layers.25.self_attn.v_proj",
- "language_model.model.layers.25.self_attn.o_proj",
- "language_model.model.layers.25.feed_forward.router",
- "language_model.model.layers.25.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.25.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.25.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.26.self_attn.q_proj",
- "language_model.model.layers.26.self_attn.k_proj",
- "language_model.model.layers.26.self_attn.v_proj",
- "language_model.model.layers.26.self_attn.o_proj",
- "language_model.model.layers.26.feed_forward.gate_proj",
- "language_model.model.layers.26.feed_forward.up_proj",
- "language_model.model.layers.26.feed_forward.down_proj",
- "language_model.model.layers.27.self_attn.q_proj",
- "language_model.model.layers.27.self_attn.k_proj",
- "language_model.model.layers.27.self_attn.v_proj",
- "language_model.model.layers.27.self_attn.o_proj",
- "language_model.model.layers.27.feed_forward.router",
- "language_model.model.layers.27.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.27.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.27.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.28.self_attn.q_proj",
- "language_model.model.layers.28.self_attn.k_proj",
- "language_model.model.layers.28.self_attn.v_proj",
- "language_model.model.layers.28.self_attn.o_proj",
- "language_model.model.layers.28.feed_forward.gate_proj",
- "language_model.model.layers.28.feed_forward.up_proj",
- "language_model.model.layers.28.feed_forward.down_proj",
- "language_model.model.layers.29.self_attn.q_proj",
- "language_model.model.layers.29.self_attn.k_proj",
- "language_model.model.layers.29.self_attn.v_proj",
- "language_model.model.layers.29.self_attn.o_proj",
- "language_model.model.layers.29.feed_forward.router",
- "language_model.model.layers.29.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.29.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.29.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.30.self_attn.q_proj",
- "language_model.model.layers.30.self_attn.k_proj",
- "language_model.model.layers.30.self_attn.v_proj",
- "language_model.model.layers.30.self_attn.o_proj",
- "language_model.model.layers.30.feed_forward.gate_proj",
- "language_model.model.layers.30.feed_forward.up_proj",
- "language_model.model.layers.30.feed_forward.down_proj",
- "language_model.model.layers.31.self_attn.q_proj",
- "language_model.model.layers.31.self_attn.k_proj",
- "language_model.model.layers.31.self_attn.v_proj",
- "language_model.model.layers.31.self_attn.o_proj",
- "language_model.model.layers.31.feed_forward.router",
- "language_model.model.layers.31.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.31.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.31.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.32.self_attn.q_proj",
- "language_model.model.layers.32.self_attn.k_proj",
- "language_model.model.layers.32.self_attn.v_proj",
- "language_model.model.layers.32.self_attn.o_proj",
- "language_model.model.layers.32.feed_forward.gate_proj",
- "language_model.model.layers.32.feed_forward.up_proj",
- "language_model.model.layers.32.feed_forward.down_proj",
- "language_model.model.layers.33.self_attn.q_proj",
- "language_model.model.layers.33.self_attn.k_proj",
- "language_model.model.layers.33.self_attn.v_proj",
- "language_model.model.layers.33.self_attn.o_proj",
- "language_model.model.layers.33.feed_forward.router",
- "language_model.model.layers.33.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.33.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.33.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.34.self_attn.q_proj",
- "language_model.model.layers.34.self_attn.k_proj",
- "language_model.model.layers.34.self_attn.v_proj",
- "language_model.model.layers.34.self_attn.o_proj",
- "language_model.model.layers.34.feed_forward.gate_proj",
- "language_model.model.layers.34.feed_forward.up_proj",
- "language_model.model.layers.34.feed_forward.down_proj",
- "language_model.model.layers.35.self_attn.q_proj",
- "language_model.model.layers.35.self_attn.k_proj",
- "language_model.model.layers.35.self_attn.v_proj",
- "language_model.model.layers.35.self_attn.o_proj",
- "language_model.model.layers.35.feed_forward.router",
- "language_model.model.layers.35.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.35.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.35.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.36.self_attn.q_proj",
- "language_model.model.layers.36.self_attn.k_proj",
- "language_model.model.layers.36.self_attn.v_proj",
- "language_model.model.layers.36.self_attn.o_proj",
- "language_model.model.layers.36.feed_forward.gate_proj",
- "language_model.model.layers.36.feed_forward.up_proj",
- "language_model.model.layers.36.feed_forward.down_proj",
- "language_model.model.layers.37.self_attn.q_proj",
- "language_model.model.layers.37.self_attn.k_proj",
- "language_model.model.layers.37.self_attn.v_proj",
- "language_model.model.layers.37.self_attn.o_proj",
- "language_model.model.layers.37.feed_forward.router",
- "language_model.model.layers.37.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.37.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.37.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.38.self_attn.q_proj",
- "language_model.model.layers.38.self_attn.k_proj",
- "language_model.model.layers.38.self_attn.v_proj",
- "language_model.model.layers.38.self_attn.o_proj",
- "language_model.model.layers.38.feed_forward.gate_proj",
- "language_model.model.layers.38.feed_forward.up_proj",
- "language_model.model.layers.38.feed_forward.down_proj",
- "language_model.model.layers.39.self_attn.q_proj",
- "language_model.model.layers.39.self_attn.k_proj",
- "language_model.model.layers.39.self_attn.v_proj",
- "language_model.model.layers.39.self_attn.o_proj",
- "language_model.model.layers.39.feed_forward.router",
- "language_model.model.layers.39.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.39.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.39.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.40.self_attn.q_proj",
- "language_model.model.layers.40.self_attn.k_proj",
- "language_model.model.layers.40.self_attn.v_proj",
- "language_model.model.layers.40.self_attn.o_proj",
- "language_model.model.layers.40.feed_forward.gate_proj",
- "language_model.model.layers.40.feed_forward.up_proj",
- "language_model.model.layers.40.feed_forward.down_proj",
- "language_model.model.layers.41.self_attn.q_proj",
- "language_model.model.layers.41.self_attn.k_proj",
- "language_model.model.layers.41.self_attn.v_proj",
- "language_model.model.layers.41.self_attn.o_proj",
- "language_model.model.layers.41.feed_forward.router",
- "language_model.model.layers.41.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.41.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.41.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.42.self_attn.q_proj",
- "language_model.model.layers.42.self_attn.k_proj",
- "language_model.model.layers.42.self_attn.v_proj",
- "language_model.model.layers.42.self_attn.o_proj",
- "language_model.model.layers.42.feed_forward.gate_proj",
- "language_model.model.layers.42.feed_forward.up_proj",
- "language_model.model.layers.42.feed_forward.down_proj",
- "language_model.model.layers.43.self_attn.q_proj",
- "language_model.model.layers.43.self_attn.k_proj",
- "language_model.model.layers.43.self_attn.v_proj",
- "language_model.model.layers.43.self_attn.o_proj",
- "language_model.model.layers.43.feed_forward.router",
- "language_model.model.layers.43.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.43.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.43.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.44.self_attn.q_proj",
- "language_model.model.layers.44.self_attn.k_proj",
- "language_model.model.layers.44.self_attn.v_proj",
- "language_model.model.layers.44.self_attn.o_proj",
- "language_model.model.layers.44.feed_forward.gate_proj",
- "language_model.model.layers.44.feed_forward.up_proj",
- "language_model.model.layers.44.feed_forward.down_proj",
- "language_model.model.layers.45.self_attn.q_proj",
- "language_model.model.layers.45.self_attn.k_proj",
- "language_model.model.layers.45.self_attn.v_proj",
- "language_model.model.layers.45.self_attn.o_proj",
- "language_model.model.layers.45.feed_forward.router",
- "language_model.model.layers.45.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.45.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.45.feed_forward.shared_expert.down_proj",
- "language_model.model.layers.46.self_attn.q_proj",
- "language_model.model.layers.46.self_attn.k_proj",
- "language_model.model.layers.46.self_attn.v_proj",
- "language_model.model.layers.46.self_attn.o_proj",
- "language_model.model.layers.46.feed_forward.gate_proj",
- "language_model.model.layers.46.feed_forward.up_proj",
- "language_model.model.layers.46.feed_forward.down_proj",
- "language_model.model.layers.47.self_attn.q_proj",
- "language_model.model.layers.47.self_attn.k_proj",
- "language_model.model.layers.47.self_attn.v_proj",
- "language_model.model.layers.47.self_attn.o_proj",
- "language_model.model.layers.47.feed_forward.router",
- "language_model.model.layers.47.feed_forward.shared_expert.gate_proj",
- "language_model.model.layers.47.feed_forward.shared_expert.up_proj",
- "language_model.model.layers.47.feed_forward.shared_expert.down_proj",
- "language_model.lm_head"
- ],
- "kv_cache_scheme": null,
- "quant_method": "compressed-tensors",
- "quantization_status": "compressed"
- },
- "text_config": {
- "_attn_implementation_autoset": true,
- "attention_bias": false,
- "attention_chunk_size": 8192,
- "attention_dropout": 0.0,
- "bos_token_id": 200000,
- "eos_token_id": [
- 200001,
- 200007,
- 200008
  ],
- "for_llm_compressor": true,
- "head_dim": 128,
- "hidden_act": "silu",
- "hidden_size": 5120,
- "initializer_range": 0.02,
- "interleave_moe_layer_step": 2,
- "intermediate_size": 8192,
- "intermediate_size_mlp": 16384,
- "max_position_embeddings": 1048576,
- "model_type": "llama4_text",
- "nope_layer_interval": 4,
- "num_attention_heads": 40,
- "num_experts_per_tok": 1,
- "num_hidden_layers": 48,
- "num_key_value_heads": 8,
- "num_local_experts": 128,
- "output_router_logits": false,
- "pad_token_id": 200018,
- "rms_norm_eps": 1e-05,
- "rope_scaling": null,
- "rope_theta": 500000.0,
- "router_aux_loss_coef": 0.001,
- "router_jitter_noise": 0.0,
- "torch_dtype": "bfloat16",
- "use_cache": true,
- "use_qk_norm": false,
- "vocab_size": 202048
  },
  "torch_dtype": "bfloat16",
- "transformers_version": "4.51.0.dev0",
- "vision_config": {
- "_attn_implementation_autoset": true,
- "attention_dropout": 0.0,
- "hidden_act": "gelu",
- "hidden_size": 1408,
- "image_size": 336,
- "initializer_range": 0.02,
- "intermediate_size": 5632,
- "model_type": "llama4_vision_model",
- "multi_modal_projector_bias": false,
- "norm_eps": 1e-05,
- "num_attention_heads": 16,
- "num_channels": 3,
- "num_hidden_layers": 34,
- "patch_size": 14,
- "pixel_shuffle_ratio": 0.5,
- "projector_dropout": 0.0,
- "projector_input_dim": 4096,
- "projector_output_dim": 4096,
- "rope_theta": 10000,
- "vision_feature_layer": -1,
- "vision_feature_select_strategy": "default",
- "vision_output_dim": 4096
- }
  }
 
  {
+ "architectures": [
+ "Llama4ForConditionalGeneration"
+ ],
+ "boi_token_index": 200080,
+ "eoi_token_index": 200081,
+ "image_token_index": 200092,
+ "model_type": "llama4",
+ "pad_token_id": 200018,
+ "quantization_config": {
+ "config_groups": {
+ "group_0": {
+ "input_activations": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": true,
+ "group_size": null,
+ "num_bits": 8,
+ "observer": null,
+ "observer_kwargs": {},
+ "strategy": "token",
+ "symmetric": true,
+ "type": "float"
  },
+ "output_activations": null,
+ "targets": [
+ "Linear"
  ],
+ "weights": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": false,
+ "group_size": null,
+ "num_bits": 8,
+ "observer": "minmax",
+ "observer_kwargs": {},
+ "strategy": "channel",
+ "symmetric": true,
+ "type": "float"
+ }
+ }
  },
+ "format": "float-quantized",
+ "global_compression_ratio": null,
+ "ignore": [
+ "vision_model.patch_embedding.linear",
+ "vision_model.model.layers.0.self_attn.q_proj",
+ "vision_model.model.layers.0.self_attn.k_proj",
+ "vision_model.model.layers.0.self_attn.v_proj",
+ "vision_model.model.layers.0.self_attn.o_proj",
+ "vision_model.model.layers.0.mlp.fc1",
+ "vision_model.model.layers.0.mlp.fc2",
+ "vision_model.model.layers.1.self_attn.q_proj",
+ "vision_model.model.layers.1.self_attn.k_proj",
+ "vision_model.model.layers.1.self_attn.v_proj",
+ "vision_model.model.layers.1.self_attn.o_proj",
+ "vision_model.model.layers.1.mlp.fc1",
+ "vision_model.model.layers.1.mlp.fc2",
+ "vision_model.model.layers.2.self_attn.q_proj",
+ "vision_model.model.layers.2.self_attn.k_proj",
+ "vision_model.model.layers.2.self_attn.v_proj",
+ "vision_model.model.layers.2.self_attn.o_proj",
+ "vision_model.model.layers.2.mlp.fc1",
+ "vision_model.model.layers.2.mlp.fc2",
+ "vision_model.model.layers.3.self_attn.q_proj",
+ "vision_model.model.layers.3.self_attn.k_proj",
+ "vision_model.model.layers.3.self_attn.v_proj",
+ "vision_model.model.layers.3.self_attn.o_proj",
+ "vision_model.model.layers.3.mlp.fc1",
+ "vision_model.model.layers.3.mlp.fc2",
+ "vision_model.model.layers.4.self_attn.q_proj",
+ "vision_model.model.layers.4.self_attn.k_proj",
+ "vision_model.model.layers.4.self_attn.v_proj",
+ "vision_model.model.layers.4.self_attn.o_proj",
+ "vision_model.model.layers.4.mlp.fc1",
+ "vision_model.model.layers.4.mlp.fc2",
+ "vision_model.model.layers.5.self_attn.q_proj",
+ "vision_model.model.layers.5.self_attn.k_proj",
+ "vision_model.model.layers.5.self_attn.v_proj",
+ "vision_model.model.layers.5.self_attn.o_proj",
+ "vision_model.model.layers.5.mlp.fc1",
+ "vision_model.model.layers.5.mlp.fc2",
+ "vision_model.model.layers.6.self_attn.q_proj",
+ "vision_model.model.layers.6.self_attn.k_proj",
+ "vision_model.model.layers.6.self_attn.v_proj",
+ "vision_model.model.layers.6.self_attn.o_proj",
+ "vision_model.model.layers.6.mlp.fc1",
+ "vision_model.model.layers.6.mlp.fc2",
+ "vision_model.model.layers.7.self_attn.q_proj",
+ "vision_model.model.layers.7.self_attn.k_proj",
+ "vision_model.model.layers.7.self_attn.v_proj",
+ "vision_model.model.layers.7.self_attn.o_proj",
+ "vision_model.model.layers.7.mlp.fc1",
+ "vision_model.model.layers.7.mlp.fc2",
+ "vision_model.model.layers.8.self_attn.q_proj",
+ "vision_model.model.layers.8.self_attn.k_proj",
+ "vision_model.model.layers.8.self_attn.v_proj",
+ "vision_model.model.layers.8.self_attn.o_proj",
+ "vision_model.model.layers.8.mlp.fc1",
+ "vision_model.model.layers.8.mlp.fc2",
+ "vision_model.model.layers.9.self_attn.q_proj",
+ "vision_model.model.layers.9.self_attn.k_proj",
+ "vision_model.model.layers.9.self_attn.v_proj",
+ "vision_model.model.layers.9.self_attn.o_proj",
+ "vision_model.model.layers.9.mlp.fc1",
+ "vision_model.model.layers.9.mlp.fc2",
+ "vision_model.model.layers.10.self_attn.q_proj",
+ "vision_model.model.layers.10.self_attn.k_proj",
+ "vision_model.model.layers.10.self_attn.v_proj",
+ "vision_model.model.layers.10.self_attn.o_proj",
+ "vision_model.model.layers.10.mlp.fc1",
+ "vision_model.model.layers.10.mlp.fc2",
+ "vision_model.model.layers.11.self_attn.q_proj",
+ "vision_model.model.layers.11.self_attn.k_proj",
+ "vision_model.model.layers.11.self_attn.v_proj",
+ "vision_model.model.layers.11.self_attn.o_proj",
+ "vision_model.model.layers.11.mlp.fc1",
+ "vision_model.model.layers.11.mlp.fc2",
+ "vision_model.model.layers.12.self_attn.q_proj",
+ "vision_model.model.layers.12.self_attn.k_proj",
+ "vision_model.model.layers.12.self_attn.v_proj",
+ "vision_model.model.layers.12.self_attn.o_proj",
+ "vision_model.model.layers.12.mlp.fc1",
+ "vision_model.model.layers.12.mlp.fc2",
+ "vision_model.model.layers.13.self_attn.q_proj",
+ "vision_model.model.layers.13.self_attn.k_proj",
+ "vision_model.model.layers.13.self_attn.v_proj",
+ "vision_model.model.layers.13.self_attn.o_proj",
+ "vision_model.model.layers.13.mlp.fc1",
+ "vision_model.model.layers.13.mlp.fc2",
+ "vision_model.model.layers.14.self_attn.q_proj",
+ "vision_model.model.layers.14.self_attn.k_proj",
+ "vision_model.model.layers.14.self_attn.v_proj",
+ "vision_model.model.layers.14.self_attn.o_proj",
+ "vision_model.model.layers.14.mlp.fc1",
+ "vision_model.model.layers.14.mlp.fc2",
+ "vision_model.model.layers.15.self_attn.q_proj",
+ "vision_model.model.layers.15.self_attn.k_proj",
+ "vision_model.model.layers.15.self_attn.v_proj",
+ "vision_model.model.layers.15.self_attn.o_proj",
+ "vision_model.model.layers.15.mlp.fc1",
+ "vision_model.model.layers.15.mlp.fc2",
+ "vision_model.model.layers.16.self_attn.q_proj",
+ "vision_model.model.layers.16.self_attn.k_proj",
+ "vision_model.model.layers.16.self_attn.v_proj",
+ "vision_model.model.layers.16.self_attn.o_proj",
+ "vision_model.model.layers.16.mlp.fc1",
+ "vision_model.model.layers.16.mlp.fc2",
+ "vision_model.model.layers.17.self_attn.q_proj",
+ "vision_model.model.layers.17.self_attn.k_proj",
+ "vision_model.model.layers.17.self_attn.v_proj",
+ "vision_model.model.layers.17.self_attn.o_proj",
+ "vision_model.model.layers.17.mlp.fc1",
+ "vision_model.model.layers.17.mlp.fc2",
+ "vision_model.model.layers.18.self_attn.q_proj",
+ "vision_model.model.layers.18.self_attn.k_proj",
+ "vision_model.model.layers.18.self_attn.v_proj",
+ "vision_model.model.layers.18.self_attn.o_proj",
+ "vision_model.model.layers.18.mlp.fc1",
+ "vision_model.model.layers.18.mlp.fc2",
+ "vision_model.model.layers.19.self_attn.q_proj",
+ "vision_model.model.layers.19.self_attn.k_proj",
+ "vision_model.model.layers.19.self_attn.v_proj",
+ "vision_model.model.layers.19.self_attn.o_proj",
+ "vision_model.model.layers.19.mlp.fc1",
+ "vision_model.model.layers.19.mlp.fc2",
+ "vision_model.model.layers.20.self_attn.q_proj",
+ "vision_model.model.layers.20.self_attn.k_proj",
+ "vision_model.model.layers.20.self_attn.v_proj",
+ "vision_model.model.layers.20.self_attn.o_proj",
+ "vision_model.model.layers.20.mlp.fc1",
+ "vision_model.model.layers.20.mlp.fc2",
+ "vision_model.model.layers.21.self_attn.q_proj",
+ "vision_model.model.layers.21.self_attn.k_proj",
+ "vision_model.model.layers.21.self_attn.v_proj",
+ "vision_model.model.layers.21.self_attn.o_proj",
+ "vision_model.model.layers.21.mlp.fc1",
+ "vision_model.model.layers.21.mlp.fc2",
+ "vision_model.model.layers.22.self_attn.q_proj",
+ "vision_model.model.layers.22.self_attn.k_proj",
+ "vision_model.model.layers.22.self_attn.v_proj",
+ "vision_model.model.layers.22.self_attn.o_proj",
+ "vision_model.model.layers.22.mlp.fc1",
+ "vision_model.model.layers.22.mlp.fc2",
+ "vision_model.model.layers.23.self_attn.q_proj",
+ "vision_model.model.layers.23.self_attn.k_proj",
+ "vision_model.model.layers.23.self_attn.v_proj",
+ "vision_model.model.layers.23.self_attn.o_proj",
+ "vision_model.model.layers.23.mlp.fc1",
+ "vision_model.model.layers.23.mlp.fc2",
+ "vision_model.model.layers.24.self_attn.q_proj",
+ "vision_model.model.layers.24.self_attn.k_proj",
+ "vision_model.model.layers.24.self_attn.v_proj",
+ "vision_model.model.layers.24.self_attn.o_proj",
+ "vision_model.model.layers.24.mlp.fc1",
+ "vision_model.model.layers.24.mlp.fc2",
+ "vision_model.model.layers.25.self_attn.q_proj",
+ "vision_model.model.layers.25.self_attn.k_proj",
+ "vision_model.model.layers.25.self_attn.v_proj",
+ "vision_model.model.layers.25.self_attn.o_proj",
+ "vision_model.model.layers.25.mlp.fc1",
+ "vision_model.model.layers.25.mlp.fc2",
+ "vision_model.model.layers.26.self_attn.q_proj",
+ "vision_model.model.layers.26.self_attn.k_proj",
+ "vision_model.model.layers.26.self_attn.v_proj",
+ "vision_model.model.layers.26.self_attn.o_proj",
+ "vision_model.model.layers.26.mlp.fc1",
+ "vision_model.model.layers.26.mlp.fc2",
+ "vision_model.model.layers.27.self_attn.q_proj",
+ "vision_model.model.layers.27.self_attn.k_proj",
+ "vision_model.model.layers.27.self_attn.v_proj",
+ "vision_model.model.layers.27.self_attn.o_proj",
+ "vision_model.model.layers.27.mlp.fc1",
+ "vision_model.model.layers.27.mlp.fc2",
+ "vision_model.model.layers.28.self_attn.q_proj",
+ "vision_model.model.layers.28.self_attn.k_proj",
+ "vision_model.model.layers.28.self_attn.v_proj",
218
+ "vision_model.model.layers.28.self_attn.o_proj",
219
+ "vision_model.model.layers.28.mlp.fc1",
220
+ "vision_model.model.layers.28.mlp.fc2",
221
+ "vision_model.model.layers.29.self_attn.q_proj",
222
+ "vision_model.model.layers.29.self_attn.k_proj",
223
+ "vision_model.model.layers.29.self_attn.v_proj",
224
+ "vision_model.model.layers.29.self_attn.o_proj",
225
+ "vision_model.model.layers.29.mlp.fc1",
226
+ "vision_model.model.layers.29.mlp.fc2",
227
+ "vision_model.model.layers.30.self_attn.q_proj",
228
+ "vision_model.model.layers.30.self_attn.k_proj",
229
+ "vision_model.model.layers.30.self_attn.v_proj",
230
+ "vision_model.model.layers.30.self_attn.o_proj",
231
+ "vision_model.model.layers.30.mlp.fc1",
232
+ "vision_model.model.layers.30.mlp.fc2",
233
+ "vision_model.model.layers.31.self_attn.q_proj",
234
+ "vision_model.model.layers.31.self_attn.k_proj",
235
+ "vision_model.model.layers.31.self_attn.v_proj",
236
+ "vision_model.model.layers.31.self_attn.o_proj",
237
+ "vision_model.model.layers.31.mlp.fc1",
238
+ "vision_model.model.layers.31.mlp.fc2",
239
+ "vision_model.model.layers.32.self_attn.q_proj",
240
+ "vision_model.model.layers.32.self_attn.k_proj",
241
+ "vision_model.model.layers.32.self_attn.v_proj",
242
+ "vision_model.model.layers.32.self_attn.o_proj",
243
+ "vision_model.model.layers.32.mlp.fc1",
244
+ "vision_model.model.layers.32.mlp.fc2",
245
+ "vision_model.model.layers.33.self_attn.q_proj",
246
+ "vision_model.model.layers.33.self_attn.k_proj",
247
+ "vision_model.model.layers.33.self_attn.v_proj",
248
+ "vision_model.model.layers.33.self_attn.o_proj",
249
+ "vision_model.model.layers.33.mlp.fc1",
250
+ "vision_model.model.layers.33.mlp.fc2",
251
+ "vision_model.vision_adapter.mlp.fc1",
252
+ "vision_model.vision_adapter.mlp.fc2",
253
+ "multi_modal_projector.linear_1",
254
+ "language_model.model.layers.0.self_attn.q_proj",
255
+ "language_model.model.layers.0.self_attn.k_proj",
256
+ "language_model.model.layers.0.self_attn.v_proj",
257
+ "language_model.model.layers.0.self_attn.o_proj",
258
+ "language_model.model.layers.0.feed_forward.gate_proj",
259
+ "language_model.model.layers.0.feed_forward.up_proj",
260
+ "language_model.model.layers.0.feed_forward.down_proj",
261
+ "language_model.model.layers.1.self_attn.q_proj",
262
+ "language_model.model.layers.1.self_attn.k_proj",
263
+ "language_model.model.layers.1.self_attn.v_proj",
264
+ "language_model.model.layers.1.self_attn.o_proj",
265
+ "language_model.model.layers.1.feed_forward.router",
266
+ "language_model.model.layers.1.feed_forward.shared_expert.gate_proj",
267
+ "language_model.model.layers.1.feed_forward.shared_expert.up_proj",
268
+ "language_model.model.layers.1.feed_forward.shared_expert.down_proj",
269
+ "language_model.model.layers.2.self_attn.q_proj",
270
+ "language_model.model.layers.2.self_attn.k_proj",
271
+ "language_model.model.layers.2.self_attn.v_proj",
272
+ "language_model.model.layers.2.self_attn.o_proj",
273
+ "language_model.model.layers.2.feed_forward.gate_proj",
274
+ "language_model.model.layers.2.feed_forward.up_proj",
275
+ "language_model.model.layers.2.feed_forward.down_proj",
276
+ "language_model.model.layers.3.self_attn.q_proj",
277
+ "language_model.model.layers.3.self_attn.k_proj",
278
+ "language_model.model.layers.3.self_attn.v_proj",
279
+ "language_model.model.layers.3.self_attn.o_proj",
280
+ "language_model.model.layers.3.feed_forward.router",
281
+ "language_model.model.layers.3.feed_forward.shared_expert.gate_proj",
282
+ "language_model.model.layers.3.feed_forward.shared_expert.up_proj",
283
+ "language_model.model.layers.3.feed_forward.shared_expert.down_proj",
284
+ "language_model.model.layers.4.self_attn.q_proj",
285
+ "language_model.model.layers.4.self_attn.k_proj",
286
+ "language_model.model.layers.4.self_attn.v_proj",
287
+ "language_model.model.layers.4.self_attn.o_proj",
288
+ "language_model.model.layers.4.feed_forward.gate_proj",
289
+ "language_model.model.layers.4.feed_forward.up_proj",
290
+ "language_model.model.layers.4.feed_forward.down_proj",
291
+ "language_model.model.layers.5.self_attn.q_proj",
292
+ "language_model.model.layers.5.self_attn.k_proj",
293
+ "language_model.model.layers.5.self_attn.v_proj",
294
+ "language_model.model.layers.5.self_attn.o_proj",
295
+ "language_model.model.layers.5.feed_forward.router",
296
+ "language_model.model.layers.5.feed_forward.shared_expert.gate_proj",
297
+ "language_model.model.layers.5.feed_forward.shared_expert.up_proj",
298
+ "language_model.model.layers.5.feed_forward.shared_expert.down_proj",
299
+ "language_model.model.layers.6.self_attn.q_proj",
300
+ "language_model.model.layers.6.self_attn.k_proj",
301
+ "language_model.model.layers.6.self_attn.v_proj",
302
+ "language_model.model.layers.6.self_attn.o_proj",
303
+ "language_model.model.layers.6.feed_forward.gate_proj",
304
+ "language_model.model.layers.6.feed_forward.up_proj",
305
+ "language_model.model.layers.6.feed_forward.down_proj",
306
+ "language_model.model.layers.7.self_attn.q_proj",
307
+ "language_model.model.layers.7.self_attn.k_proj",
308
+ "language_model.model.layers.7.self_attn.v_proj",
309
+ "language_model.model.layers.7.self_attn.o_proj",
310
+ "language_model.model.layers.7.feed_forward.router",
311
+ "language_model.model.layers.7.feed_forward.shared_expert.gate_proj",
312
+ "language_model.model.layers.7.feed_forward.shared_expert.up_proj",
313
+ "language_model.model.layers.7.feed_forward.shared_expert.down_proj",
314
+ "language_model.model.layers.8.self_attn.q_proj",
315
+ "language_model.model.layers.8.self_attn.k_proj",
316
+ "language_model.model.layers.8.self_attn.v_proj",
317
+ "language_model.model.layers.8.self_attn.o_proj",
318
+ "language_model.model.layers.8.feed_forward.gate_proj",
319
+ "language_model.model.layers.8.feed_forward.up_proj",
320
+ "language_model.model.layers.8.feed_forward.down_proj",
321
+ "language_model.model.layers.9.self_attn.q_proj",
322
+ "language_model.model.layers.9.self_attn.k_proj",
323
+ "language_model.model.layers.9.self_attn.v_proj",
324
+ "language_model.model.layers.9.self_attn.o_proj",
325
+ "language_model.model.layers.9.feed_forward.router",
326
+ "language_model.model.layers.9.feed_forward.shared_expert.gate_proj",
327
+ "language_model.model.layers.9.feed_forward.shared_expert.up_proj",
328
+ "language_model.model.layers.9.feed_forward.shared_expert.down_proj",
329
+ "language_model.model.layers.10.self_attn.q_proj",
330
+ "language_model.model.layers.10.self_attn.k_proj",
331
+ "language_model.model.layers.10.self_attn.v_proj",
332
+ "language_model.model.layers.10.self_attn.o_proj",
333
+ "language_model.model.layers.10.feed_forward.gate_proj",
334
+ "language_model.model.layers.10.feed_forward.up_proj",
335
+ "language_model.model.layers.10.feed_forward.down_proj",
336
+ "language_model.model.layers.11.self_attn.q_proj",
337
+ "language_model.model.layers.11.self_attn.k_proj",
338
+ "language_model.model.layers.11.self_attn.v_proj",
339
+ "language_model.model.layers.11.self_attn.o_proj",
340
+ "language_model.model.layers.11.feed_forward.router",
341
+ "language_model.model.layers.11.feed_forward.shared_expert.gate_proj",
342
+ "language_model.model.layers.11.feed_forward.shared_expert.up_proj",
343
+ "language_model.model.layers.11.feed_forward.shared_expert.down_proj",
344
+ "language_model.model.layers.12.self_attn.q_proj",
345
+ "language_model.model.layers.12.self_attn.k_proj",
346
+ "language_model.model.layers.12.self_attn.v_proj",
347
+ "language_model.model.layers.12.self_attn.o_proj",
348
+ "language_model.model.layers.12.feed_forward.gate_proj",
349
+ "language_model.model.layers.12.feed_forward.up_proj",
350
+ "language_model.model.layers.12.feed_forward.down_proj",
351
+ "language_model.model.layers.13.self_attn.q_proj",
352
+ "language_model.model.layers.13.self_attn.k_proj",
353
+ "language_model.model.layers.13.self_attn.v_proj",
354
+ "language_model.model.layers.13.self_attn.o_proj",
355
+ "language_model.model.layers.13.feed_forward.router",
356
+ "language_model.model.layers.13.feed_forward.shared_expert.gate_proj",
357
+ "language_model.model.layers.13.feed_forward.shared_expert.up_proj",
358
+ "language_model.model.layers.13.feed_forward.shared_expert.down_proj",
359
+ "language_model.model.layers.14.self_attn.q_proj",
360
+ "language_model.model.layers.14.self_attn.k_proj",
361
+ "language_model.model.layers.14.self_attn.v_proj",
362
+ "language_model.model.layers.14.self_attn.o_proj",
363
+ "language_model.model.layers.14.feed_forward.gate_proj",
364
+ "language_model.model.layers.14.feed_forward.up_proj",
365
+ "language_model.model.layers.14.feed_forward.down_proj",
366
+ "language_model.model.layers.15.self_attn.q_proj",
367
+ "language_model.model.layers.15.self_attn.k_proj",
368
+ "language_model.model.layers.15.self_attn.v_proj",
369
+ "language_model.model.layers.15.self_attn.o_proj",
370
+ "language_model.model.layers.15.feed_forward.router",
371
+ "language_model.model.layers.15.feed_forward.shared_expert.gate_proj",
372
+ "language_model.model.layers.15.feed_forward.shared_expert.up_proj",
373
+ "language_model.model.layers.15.feed_forward.shared_expert.down_proj",
374
+ "language_model.model.layers.16.self_attn.q_proj",
375
+ "language_model.model.layers.16.self_attn.k_proj",
376
+ "language_model.model.layers.16.self_attn.v_proj",
377
+ "language_model.model.layers.16.self_attn.o_proj",
378
+ "language_model.model.layers.16.feed_forward.gate_proj",
379
+ "language_model.model.layers.16.feed_forward.up_proj",
380
+ "language_model.model.layers.16.feed_forward.down_proj",
381
+ "language_model.model.layers.17.self_attn.q_proj",
382
+ "language_model.model.layers.17.self_attn.k_proj",
383
+ "language_model.model.layers.17.self_attn.v_proj",
384
+ "language_model.model.layers.17.self_attn.o_proj",
385
+ "language_model.model.layers.17.feed_forward.router",
386
+ "language_model.model.layers.17.feed_forward.shared_expert.gate_proj",
387
+ "language_model.model.layers.17.feed_forward.shared_expert.up_proj",
388
+ "language_model.model.layers.17.feed_forward.shared_expert.down_proj",
389
+ "language_model.model.layers.18.self_attn.q_proj",
390
+ "language_model.model.layers.18.self_attn.k_proj",
391
+ "language_model.model.layers.18.self_attn.v_proj",
392
+ "language_model.model.layers.18.self_attn.o_proj",
393
+ "language_model.model.layers.18.feed_forward.gate_proj",
394
+ "language_model.model.layers.18.feed_forward.up_proj",
395
+ "language_model.model.layers.18.feed_forward.down_proj",
396
+ "language_model.model.layers.19.self_attn.q_proj",
397
+ "language_model.model.layers.19.self_attn.k_proj",
398
+ "language_model.model.layers.19.self_attn.v_proj",
399
+ "language_model.model.layers.19.self_attn.o_proj",
400
+ "language_model.model.layers.19.feed_forward.router",
401
+ "language_model.model.layers.19.feed_forward.shared_expert.gate_proj",
402
+ "language_model.model.layers.19.feed_forward.shared_expert.up_proj",
403
+ "language_model.model.layers.19.feed_forward.shared_expert.down_proj",
404
+ "language_model.model.layers.20.self_attn.q_proj",
405
+ "language_model.model.layers.20.self_attn.k_proj",
406
+ "language_model.model.layers.20.self_attn.v_proj",
407
+ "language_model.model.layers.20.self_attn.o_proj",
408
+ "language_model.model.layers.20.feed_forward.gate_proj",
409
+ "language_model.model.layers.20.feed_forward.up_proj",
410
+ "language_model.model.layers.20.feed_forward.down_proj",
411
+ "language_model.model.layers.21.self_attn.q_proj",
412
+ "language_model.model.layers.21.self_attn.k_proj",
413
+ "language_model.model.layers.21.self_attn.v_proj",
414
+ "language_model.model.layers.21.self_attn.o_proj",
415
+ "language_model.model.layers.21.feed_forward.router",
416
+ "language_model.model.layers.21.feed_forward.shared_expert.gate_proj",
417
+ "language_model.model.layers.21.feed_forward.shared_expert.up_proj",
418
+ "language_model.model.layers.21.feed_forward.shared_expert.down_proj",
419
+ "language_model.model.layers.22.self_attn.q_proj",
420
+ "language_model.model.layers.22.self_attn.k_proj",
421
+ "language_model.model.layers.22.self_attn.v_proj",
422
+ "language_model.model.layers.22.self_attn.o_proj",
423
+ "language_model.model.layers.22.feed_forward.gate_proj",
424
+ "language_model.model.layers.22.feed_forward.up_proj",
425
+ "language_model.model.layers.22.feed_forward.down_proj",
426
+ "language_model.model.layers.23.self_attn.q_proj",
427
+ "language_model.model.layers.23.self_attn.k_proj",
428
+ "language_model.model.layers.23.self_attn.v_proj",
429
+ "language_model.model.layers.23.self_attn.o_proj",
430
+ "language_model.model.layers.23.feed_forward.router",
431
+ "language_model.model.layers.23.feed_forward.shared_expert.gate_proj",
432
+ "language_model.model.layers.23.feed_forward.shared_expert.up_proj",
433
+ "language_model.model.layers.23.feed_forward.shared_expert.down_proj",
434
+ "language_model.model.layers.24.self_attn.q_proj",
435
+ "language_model.model.layers.24.self_attn.k_proj",
436
+ "language_model.model.layers.24.self_attn.v_proj",
437
+ "language_model.model.layers.24.self_attn.o_proj",
438
+ "language_model.model.layers.24.feed_forward.gate_proj",
439
+ "language_model.model.layers.24.feed_forward.up_proj",
440
+ "language_model.model.layers.24.feed_forward.down_proj",
441
+ "language_model.model.layers.25.self_attn.q_proj",
442
+ "language_model.model.layers.25.self_attn.k_proj",
443
+ "language_model.model.layers.25.self_attn.v_proj",
444
+ "language_model.model.layers.25.self_attn.o_proj",
445
+ "language_model.model.layers.25.feed_forward.router",
446
+ "language_model.model.layers.25.feed_forward.shared_expert.gate_proj",
447
+ "language_model.model.layers.25.feed_forward.shared_expert.up_proj",
448
+ "language_model.model.layers.25.feed_forward.shared_expert.down_proj",
449
+ "language_model.model.layers.26.self_attn.q_proj",
450
+ "language_model.model.layers.26.self_attn.k_proj",
451
+ "language_model.model.layers.26.self_attn.v_proj",
452
+ "language_model.model.layers.26.self_attn.o_proj",
453
+ "language_model.model.layers.26.feed_forward.gate_proj",
454
+ "language_model.model.layers.26.feed_forward.up_proj",
455
+ "language_model.model.layers.26.feed_forward.down_proj",
456
+ "language_model.model.layers.27.self_attn.q_proj",
457
+ "language_model.model.layers.27.self_attn.k_proj",
458
+ "language_model.model.layers.27.self_attn.v_proj",
459
+ "language_model.model.layers.27.self_attn.o_proj",
460
+ "language_model.model.layers.27.feed_forward.router",
461
+ "language_model.model.layers.27.feed_forward.shared_expert.gate_proj",
462
+ "language_model.model.layers.27.feed_forward.shared_expert.up_proj",
463
+ "language_model.model.layers.27.feed_forward.shared_expert.down_proj",
464
+ "language_model.model.layers.28.self_attn.q_proj",
465
+ "language_model.model.layers.28.self_attn.k_proj",
466
+ "language_model.model.layers.28.self_attn.v_proj",
467
+ "language_model.model.layers.28.self_attn.o_proj",
468
+ "language_model.model.layers.28.feed_forward.gate_proj",
469
+ "language_model.model.layers.28.feed_forward.up_proj",
470
+ "language_model.model.layers.28.feed_forward.down_proj",
471
+ "language_model.model.layers.29.self_attn.q_proj",
472
+ "language_model.model.layers.29.self_attn.k_proj",
473
+ "language_model.model.layers.29.self_attn.v_proj",
474
+ "language_model.model.layers.29.self_attn.o_proj",
475
+ "language_model.model.layers.29.feed_forward.router",
476
+ "language_model.model.layers.29.feed_forward.shared_expert.gate_proj",
477
+ "language_model.model.layers.29.feed_forward.shared_expert.up_proj",
478
+ "language_model.model.layers.29.feed_forward.shared_expert.down_proj",
479
+ "language_model.model.layers.30.self_attn.q_proj",
480
+ "language_model.model.layers.30.self_attn.k_proj",
481
+ "language_model.model.layers.30.self_attn.v_proj",
482
+ "language_model.model.layers.30.self_attn.o_proj",
483
+ "language_model.model.layers.30.feed_forward.gate_proj",
484
+ "language_model.model.layers.30.feed_forward.up_proj",
485
+ "language_model.model.layers.30.feed_forward.down_proj",
486
+ "language_model.model.layers.31.self_attn.q_proj",
487
+ "language_model.model.layers.31.self_attn.k_proj",
488
+ "language_model.model.layers.31.self_attn.v_proj",
489
+ "language_model.model.layers.31.self_attn.o_proj",
490
+ "language_model.model.layers.31.feed_forward.router",
491
+ "language_model.model.layers.31.feed_forward.shared_expert.gate_proj",
492
+ "language_model.model.layers.31.feed_forward.shared_expert.up_proj",
493
+ "language_model.model.layers.31.feed_forward.shared_expert.down_proj",
494
+ "language_model.model.layers.32.self_attn.q_proj",
495
+ "language_model.model.layers.32.self_attn.k_proj",
496
+ "language_model.model.layers.32.self_attn.v_proj",
497
+ "language_model.model.layers.32.self_attn.o_proj",
498
+ "language_model.model.layers.32.feed_forward.gate_proj",
499
+ "language_model.model.layers.32.feed_forward.up_proj",
500
+ "language_model.model.layers.32.feed_forward.down_proj",
501
+ "language_model.model.layers.33.self_attn.q_proj",
502
+ "language_model.model.layers.33.self_attn.k_proj",
503
+ "language_model.model.layers.33.self_attn.v_proj",
504
+ "language_model.model.layers.33.self_attn.o_proj",
505
+ "language_model.model.layers.33.feed_forward.router",
506
+ "language_model.model.layers.33.feed_forward.shared_expert.gate_proj",
507
+ "language_model.model.layers.33.feed_forward.shared_expert.up_proj",
508
+ "language_model.model.layers.33.feed_forward.shared_expert.down_proj",
509
+ "language_model.model.layers.34.self_attn.q_proj",
510
+ "language_model.model.layers.34.self_attn.k_proj",
511
+ "language_model.model.layers.34.self_attn.v_proj",
512
+ "language_model.model.layers.34.self_attn.o_proj",
513
+ "language_model.model.layers.34.feed_forward.gate_proj",
514
+ "language_model.model.layers.34.feed_forward.up_proj",
515
+ "language_model.model.layers.34.feed_forward.down_proj",
516
+ "language_model.model.layers.35.self_attn.q_proj",
517
+ "language_model.model.layers.35.self_attn.k_proj",
518
+ "language_model.model.layers.35.self_attn.v_proj",
519
+ "language_model.model.layers.35.self_attn.o_proj",
520
+ "language_model.model.layers.35.feed_forward.router",
521
+ "language_model.model.layers.35.feed_forward.shared_expert.gate_proj",
522
+ "language_model.model.layers.35.feed_forward.shared_expert.up_proj",
523
+ "language_model.model.layers.35.feed_forward.shared_expert.down_proj",
524
+ "language_model.model.layers.36.self_attn.q_proj",
525
+ "language_model.model.layers.36.self_attn.k_proj",
526
+ "language_model.model.layers.36.self_attn.v_proj",
527
+ "language_model.model.layers.36.self_attn.o_proj",
528
+ "language_model.model.layers.36.feed_forward.gate_proj",
529
+ "language_model.model.layers.36.feed_forward.up_proj",
530
+ "language_model.model.layers.36.feed_forward.down_proj",
531
+ "language_model.model.layers.37.self_attn.q_proj",
532
+ "language_model.model.layers.37.self_attn.k_proj",
533
+ "language_model.model.layers.37.self_attn.v_proj",
534
+ "language_model.model.layers.37.self_attn.o_proj",
535
+ "language_model.model.layers.37.feed_forward.router",
536
+ "language_model.model.layers.37.feed_forward.shared_expert.gate_proj",
537
+ "language_model.model.layers.37.feed_forward.shared_expert.up_proj",
538
+ "language_model.model.layers.37.feed_forward.shared_expert.down_proj",
539
+ "language_model.model.layers.38.self_attn.q_proj",
540
+ "language_model.model.layers.38.self_attn.k_proj",
541
+ "language_model.model.layers.38.self_attn.v_proj",
542
+ "language_model.model.layers.38.self_attn.o_proj",
543
+ "language_model.model.layers.38.feed_forward.gate_proj",
544
+ "language_model.model.layers.38.feed_forward.up_proj",
545
+ "language_model.model.layers.38.feed_forward.down_proj",
546
+ "language_model.model.layers.39.self_attn.q_proj",
547
+ "language_model.model.layers.39.self_attn.k_proj",
548
+ "language_model.model.layers.39.self_attn.v_proj",
549
+ "language_model.model.layers.39.self_attn.o_proj",
550
+ "language_model.model.layers.39.feed_forward.router",
551
+ "language_model.model.layers.39.feed_forward.shared_expert.gate_proj",
552
+ "language_model.model.layers.39.feed_forward.shared_expert.up_proj",
553
+ "language_model.model.layers.39.feed_forward.shared_expert.down_proj",
554
+ "language_model.model.layers.40.self_attn.q_proj",
555
+ "language_model.model.layers.40.self_attn.k_proj",
556
+ "language_model.model.layers.40.self_attn.v_proj",
557
+ "language_model.model.layers.40.self_attn.o_proj",
558
+ "language_model.model.layers.40.feed_forward.gate_proj",
559
+ "language_model.model.layers.40.feed_forward.up_proj",
560
+ "language_model.model.layers.40.feed_forward.down_proj",
561
+ "language_model.model.layers.41.self_attn.q_proj",
562
+ "language_model.model.layers.41.self_attn.k_proj",
563
+ "language_model.model.layers.41.self_attn.v_proj",
564
+ "language_model.model.layers.41.self_attn.o_proj",
565
+ "language_model.model.layers.41.feed_forward.router",
566
+ "language_model.model.layers.41.feed_forward.shared_expert.gate_proj",
567
+ "language_model.model.layers.41.feed_forward.shared_expert.up_proj",
568
+ "language_model.model.layers.41.feed_forward.shared_expert.down_proj",
569
+ "language_model.model.layers.42.self_attn.q_proj",
570
+ "language_model.model.layers.42.self_attn.k_proj",
571
+ "language_model.model.layers.42.self_attn.v_proj",
572
+ "language_model.model.layers.42.self_attn.o_proj",
573
+ "language_model.model.layers.42.feed_forward.gate_proj",
574
+ "language_model.model.layers.42.feed_forward.up_proj",
575
+ "language_model.model.layers.42.feed_forward.down_proj",
576
+ "language_model.model.layers.43.self_attn.q_proj",
577
+ "language_model.model.layers.43.self_attn.k_proj",
578
+ "language_model.model.layers.43.self_attn.v_proj",
579
+ "language_model.model.layers.43.self_attn.o_proj",
580
+ "language_model.model.layers.43.feed_forward.router",
581
+ "language_model.model.layers.43.feed_forward.shared_expert.gate_proj",
582
+ "language_model.model.layers.43.feed_forward.shared_expert.up_proj",
583
+ "language_model.model.layers.43.feed_forward.shared_expert.down_proj",
584
+ "language_model.model.layers.44.self_attn.q_proj",
585
+ "language_model.model.layers.44.self_attn.k_proj",
586
+ "language_model.model.layers.44.self_attn.v_proj",
587
+ "language_model.model.layers.44.self_attn.o_proj",
588
+ "language_model.model.layers.44.feed_forward.gate_proj",
589
+ "language_model.model.layers.44.feed_forward.up_proj",
590
+ "language_model.model.layers.44.feed_forward.down_proj",
591
+ "language_model.model.layers.45.self_attn.q_proj",
592
+ "language_model.model.layers.45.self_attn.k_proj",
593
+ "language_model.model.layers.45.self_attn.v_proj",
594
+ "language_model.model.layers.45.self_attn.o_proj",
595
+ "language_model.model.layers.45.feed_forward.router",
596
+ "language_model.model.layers.45.feed_forward.shared_expert.gate_proj",
597
+ "language_model.model.layers.45.feed_forward.shared_expert.up_proj",
598
+ "language_model.model.layers.45.feed_forward.shared_expert.down_proj",
599
+ "language_model.model.layers.46.self_attn.q_proj",
600
+ "language_model.model.layers.46.self_attn.k_proj",
601
+ "language_model.model.layers.46.self_attn.v_proj",
602
+ "language_model.model.layers.46.self_attn.o_proj",
603
+ "language_model.model.layers.46.feed_forward.gate_proj",
604
+ "language_model.model.layers.46.feed_forward.up_proj",
605
+ "language_model.model.layers.46.feed_forward.down_proj",
606
+ "language_model.model.layers.47.self_attn.q_proj",
607
+ "language_model.model.layers.47.self_attn.k_proj",
608
+ "language_model.model.layers.47.self_attn.v_proj",
609
+ "language_model.model.layers.47.self_attn.o_proj",
610
+ "language_model.model.layers.47.feed_forward.router",
611
+ "language_model.model.layers.47.feed_forward.shared_expert.gate_proj",
612
+ "language_model.model.layers.47.feed_forward.shared_expert.up_proj",
613
+ "language_model.model.layers.47.feed_forward.shared_expert.down_proj",
614
+ "language_model.lm_head"
615
+ ],
616
+ "kv_cache_scheme": null,
617
+ "quant_method": "compressed-tensors",
618
+ "quantization_status": "compressed"
619
+ },
620
+ "text_config": {
621
+ "attention_bias": false,
622
+ "attention_chunk_size": 8192,
623
+ "attention_dropout": 0.0,
624
+ "attn_scale": 0.1,
625
+ "attn_temperature_tuning": true,
626
+ "bos_token_id": 200000,
627
+ "cache_implementation": "hybrid",
628
+ "eos_token_id": [
629
+ 200001,
630
+ 200007,
631
+ 200008
632
+ ],
633
+ "floor_scale": 8192,
634
+ "for_llm_compressor": true,
635
+ "head_dim": 128,
636
+ "hidden_act": "silu",
637
+ "hidden_size": 5120,
638
+ "initializer_range": 0.02,
639
+ "interleave_moe_layer_step": 2,
640
+ "intermediate_size": 8192,
641
+ "intermediate_size_mlp": 16384,
642
+ "max_position_embeddings": 1048576,
643
+ "model_type": "llama4_text",
644
+ "moe_layers": [
645
+ 1,
646
+ 3,
647
+ 5,
648
+ 7,
649
+ 9,
650
+ 11,
651
+ 13,
652
+ 15,
653
+ 17,
654
+ 19,
655
+ 21,
656
+ 23,
657
+ 25,
658
+ 27,
659
+ 29,
660
+ 31,
661
+ 33,
662
+ 35,
663
+ 37,
664
+ 39,
665
+ 41,
666
+ 43,
667
+ 45,
668
+ 47
669
+ ],
670
+ "no_rope_layers": [
671
+ 1,
672
+ 1,
673
+ 1,
674
+ 0,
675
+ 1,
676
+ 1,
677
+ 1,
678
+ 0,
679
+ 1,
680
+ 1,
681
+ 1,
682
+ 0,
683
+ 1,
684
+ 1,
685
+ 1,
686
+ 0,
687
+ 1,
688
+ 1,
689
+ 1,
690
+ 0,
691
+ 1,
692
+ 1,
693
+ 1,
694
+ 0,
695
+ 1,
696
+ 1,
697
+ 1,
698
+ 0,
699
+ 1,
700
+ 1,
701
+ 1,
702
+ 0,
703
+ 1,
704
+ 1,
705
+ 1,
706
+ 0,
707
+ 1,
708
+ 1,
709
+ 1,
710
+ 0,
711
+ 1,
712
+ 1,
713
+ 1,
714
+ 0,
715
+ 1,
716
+ 1,
717
+ 1,
718
+ 0
719
+ ],
720
+ "nope_layer_interval": 4,
721
+ "num_attention_heads": 40,
722
+ "num_experts_per_tok": 1,
723
+ "num_hidden_layers": 48,
724
+ "num_key_value_heads": 8,
725
+ "num_local_experts": 128,
726
+ "output_router_logits": false,
727
+ "pad_token_id": 200018,
728
+ "rms_norm_eps": 1e-05,
729
+ "rope_scaling": null,
730
+ "rope_theta": 500000.0,
731
+ "router_aux_loss_coef": 0.001,
732
+ "router_jitter_noise": 0.0,
733
  "torch_dtype": "bfloat16",
734
+ "use_cache": true,
735
+ "use_qk_norm": false,
736
+ "vocab_size": 202048
737
+ },
738
+ "tie_word_embeddings": false,
739
+ "torch_dtype": "bfloat16",
740
+ "transformers_version": "4.52.3",
741
+ "unsloth_fixed": true,
742
+ "vision_config": {
743
+ "attention_dropout": 0.0,
744
+ "hidden_act": "gelu",
745
+ "hidden_size": 1408,
746
+ "image_size": 336,
747
+ "initializer_range": 0.02,
748
+ "intermediate_size": 5632,
749
+ "model_type": "llama4_vision_model",
750
+ "multi_modal_projector_bias": false,
751
+ "norm_eps": 1e-05,
752
+ "num_attention_heads": 16,
753
+ "num_channels": 3,
754
+ "num_hidden_layers": 34,
755
+ "patch_size": 14,
756
+ "pixel_shuffle_ratio": 0.5,
757
+ "projector_dropout": 0.0,
758
+ "projector_input_dim": 4096,
759
+ "projector_output_dim": 4096,
760
+ "rope_theta": 10000,
761
+ "vision_feature_layer": -1,
762
+ "vision_feature_select_strategy": "default",
763
+ "vision_output_dim": 4096
764
+ }
765
  }
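Taken together, the config.json additions describe a Llama 4 style multimodal mixture-of-experts checkpoint: a 48-layer text model in which the layers listed under "moe_layers" carry a router plus shared/routed experts while the remaining layers use dense gate_proj/up_proj/down_proj MLPs (matching "interleave_moe_layer_step": 2), a 34-layer vision tower, and a "compressed-tensors" quantization block whose module list above (its opening key falls outside this excerpt) names the attention projections, routers, shared experts, vision tower and lm_head. The snippet below is only a rough loading sketch, assuming a recent transformers (4.51+ for Llama 4 support) with the compressed-tensors package installed; the repository id is a placeholder, not taken from this commit.

```python
# Hedged sketch: loading a checkpoint whose config.json carries a
# "compressed-tensors" quantization_config like the one in this diff.
# The repo id below is a placeholder for illustration only.
import torch
from transformers import AutoProcessor, Llama4ForConditionalGeneration

model_id = "your-org/llama-4-compressed-tensors"  # placeholder, not from this commit

processor = AutoProcessor.from_pretrained(model_id)
model = Llama4ForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
    device_map="auto",           # spread the 48 text layers + vision tower over available GPUs
)

messages = [{"role": "user", "content": [{"type": "text", "text": "Describe this model in one sentence."}]}]
inputs = processor.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

out = model.generate(**inputs, max_new_tokens=64)
print(processor.batch_decode(out[:, inputs["input_ids"].shape[-1]:], skip_special_tokens=True)[0])
```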
tokenizer_config.json CHANGED
@@ -9096,5 +9096,5 @@
9096
  "processor_class": "Llama4Processor",
9097
  "tokenizer_class": "PreTrainedTokenizer",
9098
  "unk_token": null,
9099
- "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- if strftime_now is defined %}\n {%- set date_string = strftime_now(\"%d %b %Y\") %}\n {%- else %}\n {%- set date_string = \"26 Jul 2024\" %}\n {%- endif %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %} \n {%- if messages[0]['content'] is string %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- else %}\n {#- FIXME: The processor requires an array, always. #}\n {%- set system_message = messages[0]['content'][0]['text']|trim %}\n {%- endif %}\n {%- set messages = messages[1:] %}\n {%- set user_supplied_system_message = true %}\n{%- else %}\n {%- set system_message = \"\" %}\n {%- set user_supplied_system_message = false %}\n{%- endif %}\n\n{#- System message if the user supplied one #}\n{%- if user_supplied_system_message %}\n {{- \"<|header_start|>system<|header_end|>\\n\\n\" }}\n {%- if tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n {%- endif %}\n {%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {%- endif %}\n {{- system_message }}\n {{- \"<|eot|>\" }}\n{%- endif %}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|header_start|>user<|header_end|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|header_start|>' + message['role'] + '<|header_end|>\\n\\n' }}\n {%- if message['content'] is string %}\n {{- message['content'] }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'image' %}\n {{- '<|image|>' }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- \"<|eot|>\" }}\n {%- elif 'tool_calls' in message and message.tool_calls|length > 0 %}\n {{- '<|header_start|>assistant<|header_end|>\\n\\n' -}}\n {{- '<|python_start|>' }}\n {%- if message['content'] is string %}\n {{- message['content'] }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'image' %}\n {{- '<|image|>' }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- '<|python_end|>' }}\n {%- for tool_call in message.tool_calls %}\n {{- '{\"name\": \"' + tool_call.function.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.function.arguments | tojson }}\n {{- \"}\" }}\n {%- endfor %}\n {{- \"<|eot|>\" }}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|header_start|>ipython<|header_end|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|header_start|>assistant<|header_end|>\\n\\n' }}\n{%- endif %}\n"
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined and custom_tools%}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if tools is defined and tools %}\n {%- set tool_definition = tool_definition ~ (tools | tojson(indent=4)) %}\n{%- else %}\n {%- set tools = none %}\n{%- endif %}\n\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set user_provided_system_message = true %}\n {%- if messages[0]['content'] is string %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- else %}\n {%- set system_message = messages[0]['content'][0]['text']|trim %}\n {%- endif %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- if tools is not none %}\n {#- Since not system_message was provided by user, if tool is provided, system_message is now default tool system message #}\n {#- This system message is from llama website:https://www.llama.com/docs/model-cards-and-prompt-formats/llama4/ #}\n {%- set system_message = \"You are a helpful assistant and an expert in function composition. You can answer general questions using your internal knowledge OR invoke functions when necessary. Follow these strict guidelines:\\n\\n1. FUNCTION CALLS:\\n- ONLY use functions that are EXPLICITLY listed in the function list below\\n- If NO functions are listed (empty function list []), respond ONLY with internal knowledge or \\\"I don't have access to [Unavailable service] information\\\"\\n- If a function is not in the list, respond ONLY with internal knowledge or \\\"I don't have access to [Unavailable service] information\\\"\\n- If ALL required parameters are present AND the query EXACTLY matches a listed function's purpose: output ONLY the function call(s)\\n- Use exact format: [func_name1(param1=value1, param2=value2), func_name2(...)]\\nExamples:\\nCORRECT: [get_weather(location=\\\"Vancouver\\\"), calculate_route(start=\\\"Boston\\\", end=\\\"New York\\\")] <- Only if get_weather and calculate_route are in function list\\nINCORRECT: get_weather(location=\\\"New York\\\")\\nINCORRECT: Let me check the weather: [get_weather(location=\\\"New York\\\")]\\nINCORRECT: [get_events(location=\\\"Singapore\\\")] <- If function not in list\\n\\n2. RESPONSE RULES:\\n- For pure function requests matching a listed function: ONLY output the function call(s)\\n- For knowledge questions: ONLY output text\\n- For missing parameters: ONLY request the specific missing parameters\\n- For unavailable services (not in function list): output ONLY with internal knowledge or \\\"I don't have access to [Unavailable service] information\\\". Do NOT execute a function call.\\n- If the query asks for information beyond what a listed function provides: output ONLY with internal knowledge about your limitations\\n- NEVER combine text and function calls in the same response\\n- NEVER suggest alternative functions when the requested service is unavailable\\n- NEVER create or invent new functions not listed below\\n\\n3. STRICT BOUNDARIES:\\n- ONLY use functions from the list below - no exceptions\\n- NEVER use a function as an alternative to unavailable information\\n- NEVER call functions not present in the function list\\n- NEVER add explanatory text to function calls\\n- NEVER respond with empty brackets\\n- Use proper Python/JSON syntax for function calls\\n- Check the function list carefully before responding\\n\\n4. 
TOOL RESPONSE HANDLING:\\n- When receiving tool responses: provide concise, natural language responses\\n- Don't repeat tool response verbatim\\n- Don't add supplementary information\\n\\nHere is a list of functions in JSON format that you can invoke:\\n\" %}\n {%- else %}\n {%- set system_message = \"\" %}\n {%- endif %}\n{%- endif %}\n{#- Now writing the system message: use the user provided system message if user_provided_system_message, else default tool system message if tools presented #}\n{%- if system_message %}\n {#- always use user provided system message to override default tool system message #}\n {{- \"<|header_start|>system<|header_end|>\\n\\n\" }}\n {{- system_message }}\n {%- if user_provided_system_message and tools %}\n {{- \"\\nHere is a list of functions in JSON format that you can invoke. Use exact format: [func_name1(param1=value1, param2=value2), func_name2(...)]\\n\" }}\n {{- tool_definition -}}\n {%- elif tool_definition %}\n {{- tool_definition -}}\n {%- endif %}\n {{- \"<|eot|>\" }}\n{%- endif %}\n\n{#- Now deal with all other messages #}\n{%- for message in messages %}\n {#- Base case: messages that are not from tool role and has empty tool_call list #}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or ('tool_calls' in message and message.tool_calls|length != 0 )) %}\n {{- '<|header_start|>' + message['role'] + '<|header_end|>\\n\\n' }}\n {%- if message['content'] is string %}\n {{- message['content'] }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'image' %}\n {{- '<|image|>' }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] | trim }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- \"<|eot|>\" }}\n {#- Tool case: messages has non-empty tool_call list, must from assistant #}\n {%- elif 'tool_calls' in message %}\n {#- assume tool_calls are always coming from assistant #}\n {%- if message.role == 'assistant' %}\n {{- '<|header_start|>assistant<|header_end|>\\n\\n' -}}\n {%- if message['content'] is string %}\n {{- message['content'] }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'image' %}\n {{- '<|image|>' }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- \"[\" }}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- tool_call.name + '(' -}}\n {%- for param in tool_call.arguments %}\n {{- param + '=\"' -}}\n {{- \"%s\" | format(tool_call.arguments[param]) -}}\n {{- '\"' -}}\n {% if not loop.last %}, {% endif %}\n {%- endfor %}\n {{- ')' -}}\n {% if not loop.last %}, {% endif %}\n {%- endfor %}\n {{- \"]<|eot|>\" }}\n{%- endif %}\n{#- Tool_response case: messages are from tool_response #}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|header_start|>ipython<|header_end|>\\n\\n\" }}\n {%- if message.content is string %}\n {{- message.content | tojson }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'text' %}\n {{- content['text'] | tojson }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- \"<|eot|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|header_start|>assistant<|header_end|>\\n\\n' }}\n{%- endif %}"
9100
  }
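The tokenizer_config.json hunk swaps the chat template's tool-calling convention: the removed template injected tool schemas into the first user message by default and asked for JSON replies of the form {"name": ..., "parameters": ...}, whereas the added template falls back to a default tool-use system prompt (credited in the template to the llama.com prompt-format docs) and serializes assistant tool calls as pythonic [func_name(param="value"), ...] strings. The sketch below exercises the new template through apply_chat_template; the get_weather tool, repository id, and example values are invented for illustration.

```python
# Hedged sketch of the new template's tool-calling format. The tool schema,
# repo id, and values are made up; apply_chat_template's `tools` argument is
# standard transformers API and is what the template reads as `tools`.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/llama-4-compressed-tensors")  # placeholder

tools = [{
    "name": "get_weather",
    "description": "Get the current weather for a city.",
    "parameters": {
        "type": "object",
        "properties": {"location": {"type": "string"}},
        "required": ["location"],
    },
}]

messages = [
    {"role": "user", "content": "What's the weather in Vancouver?"},
    # The new template renders this turn as [get_weather(location="Vancouver")]<|eot|>;
    # note it iterates `arguments` as a mapping, so pass a dict rather than a JSON string.
    {"role": "assistant", "content": "",
     "tool_calls": [{"function": {"name": "get_weather",
                                  "arguments": {"location": "Vancouver"}}}]},
    # Tool output comes back under the ipython header and is JSON-encoded by the template.
    {"role": "tool", "content": '{"temperature_c": 11, "condition": "rain"}'},
]

prompt = tokenizer.apply_chat_template(
    messages, tools=tools, add_generation_prompt=True, tokenize=False
)
print(prompt)
```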