ovedrive committed
Commit c1af3e1 · verified · 1 Parent(s): b7c3c3e

Upload folder using huggingface_hub

Files changed (42)
  1. .gitattributes +1 -0
  2. LICENSE +202 -0
  3. README.md +135 -0
  4. model_index.json +24 -0
  5. scheduler/scheduler_config.json +18 -0
  6. text_encoder/.gitattributes +36 -0
  7. text_encoder/README.md +117 -0
  8. text_encoder/added_tokens.json +24 -0
  9. text_encoder/chat_template.jinja +7 -0
  10. text_encoder/config.json +147 -0
  11. text_encoder/generation_config.json +14 -0
  12. text_encoder/merges.txt +0 -0
  13. text_encoder/model-00001-of-00004.safetensors +3 -0
  14. text_encoder/model-00002-of-00004.safetensors +3 -0
  15. text_encoder/model-00003-of-00004.safetensors +3 -0
  16. text_encoder/model-00004-of-00004.safetensors +3 -0
  17. text_encoder/model.safetensors.index.json +832 -0
  18. text_encoder/preprocessor_config.json +37 -0
  19. text_encoder/special_tokens_map.json +31 -0
  20. text_encoder/tokenizer.json +3 -0
  21. text_encoder/tokenizer_config.json +215 -0
  22. text_encoder/video_preprocessor_config.json +43 -0
  23. text_encoder/vocab.json +0 -0
  24. tokenizer/added_tokens.json +24 -0
  25. tokenizer/chat_template.jinja +54 -0
  26. tokenizer/merges.txt +0 -0
  27. tokenizer/special_tokens_map.json +31 -0
  28. tokenizer/tokenizer_config.json +207 -0
  29. tokenizer/vocab.json +0 -0
  30. transformer/config.json +18 -0
  31. transformer/diffusion_pytorch_model-00001-of-00009.safetensors +3 -0
  32. transformer/diffusion_pytorch_model-00002-of-00009.safetensors +3 -0
  33. transformer/diffusion_pytorch_model-00003-of-00009.safetensors +3 -0
  34. transformer/diffusion_pytorch_model-00004-of-00009.safetensors +3 -0
  35. transformer/diffusion_pytorch_model-00005-of-00009.safetensors +3 -0
  36. transformer/diffusion_pytorch_model-00006-of-00009.safetensors +3 -0
  37. transformer/diffusion_pytorch_model-00007-of-00009.safetensors +3 -0
  38. transformer/diffusion_pytorch_model-00008-of-00009.safetensors +3 -0
  39. transformer/diffusion_pytorch_model-00009-of-00009.safetensors +3 -0
  40. transformer/diffusion_pytorch_model.safetensors.index.json +0 -0
  41. vae/config.json +56 -0
  42. vae/diffusion_pytorch_model.safetensors +3 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ text_encoder/tokenizer.json filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2024 Alibaba Cloud
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
README.md ADDED
@@ -0,0 +1,135 @@
+ ---
+ license: apache-2.0
+ language:
+ - en
+ - zh
+ library_name: diffusers
+ pipeline_tag: text-to-image
+ ---
+ <p align="center">
+     <img src="https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/qwen_image_logo.png" width="400"/>
+ </p>
+ <p align="center">
+     💜 <a href="https://chat.qwen.ai/"><b>Qwen Chat</b></a>&nbsp;&nbsp;|&nbsp;&nbsp;🤗 <a href="https://huggingface.co/Qwen/Qwen-Image">Hugging Face</a>&nbsp;&nbsp;|&nbsp;&nbsp;🤖 <a href="https://modelscope.cn/models/Qwen/Qwen-Image">ModelScope</a>&nbsp;&nbsp;|&nbsp;&nbsp;📑 <a href="https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/Qwen_Image.pdf">Tech Report</a>&nbsp;&nbsp;|&nbsp;&nbsp;📑 <a href="https://qwenlm.github.io/blog/qwen-image/">Blog</a>
+     <br>
+     🖥️ <a href="https://huggingface.co/spaces/Qwen/qwen-image">Demo</a>&nbsp;&nbsp;|&nbsp;&nbsp;💬 <a href="https://github.com/QwenLM/Qwen-Image/blob/main/assets/wechat.png">WeChat (微信)</a>&nbsp;&nbsp;|&nbsp;&nbsp;🫨 <a href="https://discord.gg/CV4E9rpNSD">Discord</a>
+ </p>
+
+ <p align="center">
+     <img src="https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/merge3.jpg" width="1600"/>
+ </p>
+
+ ## Introduction
+ We are thrilled to release **Qwen-Image**, an image generation foundation model in the Qwen series that achieves significant advances in **complex text rendering** and **precise image editing**. Experiments show strong general capabilities in both image generation and editing, with exceptional performance in text rendering, especially for Chinese.
+
+ ![](https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/bench.png#center)
+
+ ## News
+ - 2025.08.04: We released the [Technical Report](https://arxiv.org/abs/2508.02324) of Qwen-Image!
+ - 2025.08.04: We released the Qwen-Image weights! Find them on [Hugging Face](https://huggingface.co/Qwen/Qwen-Image) and [ModelScope](https://modelscope.cn/models/Qwen/Qwen-Image)!
+ - 2025.08.04: We released Qwen-Image! Check our [blog](https://qwenlm.github.io/blog/qwen-image) for more details!
+
+ ## Quick Start
+
+ Install the latest version of diffusers:
+ ```bash
+ pip install git+https://github.com/huggingface/diffusers
+ ```
+
+ The following code snippet illustrates how to use the model to generate images from text prompts:
+
+ ```python
+ from diffusers import DiffusionPipeline
+ import torch
+
+ model_name = "Qwen/Qwen-Image"
+
+ # Load the pipeline
+ if torch.cuda.is_available():
+     torch_dtype = torch.bfloat16
+     device = "cuda"
+ else:
+     torch_dtype = torch.float32
+     device = "cpu"
+
+ pipe = DiffusionPipeline.from_pretrained(model_name, torch_dtype=torch_dtype)
+ pipe = pipe.to(device)
+
+ positive_magic = {
+     "en": ", Ultra HD, 4K, cinematic composition.",  # for English prompts
+     "zh": ", 超清,4K,电影级构图.",  # for Chinese prompts
+ }
+
+ # Generate image
+ prompt = '''A coffee shop entrance features a chalkboard sign reading "Qwen Coffee 😊 $2 per cup," with a neon light beside it displaying "通义千问". Next to it hangs a poster showing a beautiful Chinese woman, and beneath the poster is written "π≈3.1415926-53589793-23846264-33832795-02384197". Ultra HD, 4K, cinematic composition'''
+
+ negative_prompt = " "  # use an empty string if you have no specific concept to exclude
+
+ # Generate with different aspect ratios
+ aspect_ratios = {
+     "1:1": (1328, 1328),
+     "16:9": (1664, 928),
+     "9:16": (928, 1664),
+     "4:3": (1472, 1140),
+     "3:4": (1140, 1472),
+     "3:2": (1584, 1056),
+     "2:3": (1056, 1584),
+ }
+
+ width, height = aspect_ratios["16:9"]
+
+ image = pipe(
+     prompt=prompt + positive_magic["en"],
+     negative_prompt=negative_prompt,
+     width=width,
+     height=height,
+     num_inference_steps=50,
+     true_cfg_scale=4.0,
+     generator=torch.Generator(device=device).manual_seed(42),  # match the device chosen above, not a hard-coded "cuda"
+ ).images[0]
+
+ image.save("example.png")
+ ```
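+ If the full bfloat16 pipeline does not fit in GPU memory, diffusers' built-in offloading trades speed for memory. A minimal sketch, assuming a recent diffusers build with accelerate installed:
+
+ ```python
+ # Instead of pipe.to(device): keep weights in CPU RAM and move each
+ # sub-model (text encoder, transformer, VAE) to the GPU only while it runs.
+ pipe.enable_model_cpu_offload()
+ ```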
+
+ ## Show Cases
+
+ One of its standout capabilities is high-fidelity text rendering across diverse images. Whether it’s alphabetic languages like English or logographic scripts like Chinese, Qwen-Image preserves typographic details, layout coherence, and contextual harmony with stunning accuracy. Text isn’t just overlaid—it’s seamlessly integrated into the visual fabric.
+
+ ![](https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/s1.jpg#center)
+
+ Beyond text, Qwen-Image excels at general image generation with support for a wide range of artistic styles. From photorealistic scenes to impressionist paintings, from anime aesthetics to minimalist design, the model adapts fluidly to creative prompts, making it a versatile tool for artists, designers, and storytellers.
+
+ ![](https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/s2.jpg#center)
+
+ When it comes to image editing, Qwen-Image goes far beyond simple adjustments. It enables advanced operations such as style transfer, object insertion or removal, detail enhancement, text editing within images, and even human pose manipulation—all with intuitive input and coherent output. This level of control brings professional-grade editing within reach of everyday users.
+
+ ![](https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/s3.jpg#center)
+
+ But Qwen-Image doesn’t just create or edit—it understands. It supports a suite of image understanding tasks, including object detection, semantic segmentation, depth and edge (Canny) estimation, novel view synthesis, and super-resolution. These capabilities, while technically distinct, can all be seen as specialized forms of intelligent image editing, powered by deep visual comprehension.
+
+ ![](https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-Image/s4.jpg#center)
+
+ Together, these features make Qwen-Image not just a tool for generating pretty pictures, but a comprehensive foundation model for intelligent visual creation and manipulation—where language, layout, and imagery converge.
+
+ ## License Agreement
+
+ Qwen-Image is licensed under Apache 2.0.
+
+ ## Citation
+
+ We kindly encourage citation of our work if you find it useful.
+
+ ```bibtex
+ @misc{wu2025qwenimagetechnicalreport,
+     title={Qwen-Image Technical Report},
+     author={Chenfei Wu and Jiahao Li and Jingren Zhou and Junyang Lin and Kaiyuan Gao and Kun Yan and Sheng-ming Yin and Shuai Bai and Xiao Xu and Yilei Chen and Yuxiang Chen and Zecheng Tang and Zekai Zhang and Zhengyi Wang and An Yang and Bowen Yu and Chen Cheng and Dayiheng Liu and Deqing Li and Hang Zhang and Hao Meng and Hu Wei and Jingyuan Ni and Kai Chen and Kuan Cao and Liang Peng and Lin Qu and Minggang Wu and Peng Wang and Shuting Yu and Tingkun Wen and Wensen Feng and Xiaoxiao Xu and Yi Wang and Yichang Zhang and Yongqiang Zhu and Yujia Wu and Yuxuan Cai and Zenan Liu},
+     year={2025},
+     eprint={2508.02324},
+     archivePrefix={arXiv},
+     primaryClass={cs.CV},
+     url={https://arxiv.org/abs/2508.02324},
+ }
+ ```
model_index.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "_class_name": "QwenImagePipeline",
+   "_diffusers_version": "0.34.0.dev0",
+   "scheduler": [
+     "diffusers",
+     "FlowMatchEulerDiscreteScheduler"
+   ],
+   "text_encoder": [
+     "transformers",
+     "Qwen2_5_VLForConditionalGeneration"
+   ],
+   "tokenizer": [
+     "transformers",
+     "Qwen2Tokenizer"
+   ],
+   "transformer": [
+     "diffusers",
+     "QwenImageTransformer2DModel"
+   ],
+   "vae": [
+     "diffusers",
+     "AutoencoderKLQwenImage"
+   ]
+ }
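model_index.json is what DiffusionPipeline.from_pretrained reads to assemble the pipeline: each key names a component, the library that provides its class, and the subfolder holding its weights. Components can also be loaded individually; a minimal sketch, assuming a diffusers build that ships the QwenImage classes (the card pins 0.34.0.dev0):

```python
import torch
from diffusers import QwenImageTransformer2DModel

# Load only the diffusion transformer from its subfolder,
# e.g. to quantize or fine-tune it separately from the rest of the pipeline.
transformer = QwenImageTransformer2DModel.from_pretrained(
    "Qwen/Qwen-Image", subfolder="transformer", torch_dtype=torch.bfloat16
)
```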
scheduler/scheduler_config.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "_class_name": "FlowMatchEulerDiscreteScheduler",
+   "_diffusers_version": "0.34.0.dev0",
+   "base_image_seq_len": 256,
+   "base_shift": 0.5,
+   "invert_sigmas": false,
+   "max_image_seq_len": 8192,
+   "max_shift": 0.9,
+   "num_train_timesteps": 1000,
+   "shift": 1.0,
+   "shift_terminal": 0.02,
+   "stochastic_sampling": false,
+   "time_shift_type": "exponential",
+   "use_beta_sigmas": false,
+   "use_dynamic_shifting": true,
+   "use_exponential_sigmas": false,
+   "use_karras_sigmas": false
+ }
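With use_dynamic_shifting enabled, the scheduler's timestep shift scales with image sequence length between base_shift=0.5 (at 256 tokens) and max_shift=0.9 (at 8192 tokens). The pipeline builds the scheduler from this JSON; a minimal sketch of loading it standalone with the standard diffusers API:

```python
from diffusers import FlowMatchEulerDiscreteScheduler

# Recreate the scheduler exactly as the pipeline does,
# reading the config above from the scheduler/ subfolder.
scheduler = FlowMatchEulerDiscreteScheduler.from_pretrained(
    "Qwen/Qwen-Image", subfolder="scheduler"
)
print(scheduler.config.num_train_timesteps)  # 1000
```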
text_encoder/.gitattributes ADDED
@@ -0,0 +1,36 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
text_encoder/README.md ADDED
@@ -0,0 +1,117 @@
+ ---
+ license: apache-2.0
+ language:
+ - zh
+ - en
+ tags:
+ - qwen
+ - qwen2.5-vl
+ - image-captioning
+ - multimodal
+ - nsfw
+ - long-caption
+ library_name: transformers
+ base_model:
+ - Qwen/Qwen2.5-VL-7B-Instruct
+ - XiaomiMiMo/MiMo-VL-7B-RL-2508
+ ---
+ # Model: thesby/Qwen2.5-VL-7B-NSFW-Caption-V4
+
+ ## Model Description
+
+ **thesby/Qwen2.5-VL-7B-NSFW-Caption-V4** is a multimodal large model optimized for high-quality image captioning. It is a LoRA fine-tune of the strong `Qwen/Qwen2.5-VL-7B-Instruct`, built to deliver excellent image understanding and caption generation.
+
+ We trained the model on a mixed dataset of roughly **2 million** high-quality image-text pairs, and it performs well across several dimensions. Compared with V3, V4 adds more NSFW data and more human-annotated data.
+
+ ### Key Features
+
+ 1. **Ultra-high-quality captions**:
+    The model accurately captures the core subjects, background, mood, materials, lighting, and other rich details of an image. Its captioning quality surpasses gpt4.1-mini and approaches gemini-2.5-flash.
+ 2. **Full SFW & NSFW coverage**:
+    Trained on a specially curated dataset, the model can recognize and describe both SFW (Safe for Work) and NSFW (Not Safe for Work) image content. Whether the input is an everyday scene or adult material, it produces appropriate, information-rich descriptions, greatly widening the model's range of use.
+ 3. **Long-form detailed descriptions**:
+    Unlike traditional models that emit short titles, this model excels at exhaustive descriptions of complex scenes. It can generate detailed paragraphs of several hundred words that analyze an image's narrative structure and implied meaning, which suits applications that need deep content interpretation.
+
+ ## How to Use
+
+ Usage is identical to Qwen/Qwen2.5-VL-7B-Instruct. The model was fine-tuned with a maximum image size of 800*800; staying within that limit gives better results.
+ ```python
+ from transformers import Qwen2_5_VLForConditionalGeneration, AutoTokenizer, AutoProcessor
+ from qwen_vl_utils import process_vision_info
+
+ # Default: load the model on the available device(s)
+ model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
+     "thesby/Qwen2.5-VL-7B-NSFW-Caption-V4", torch_dtype="auto", device_map="auto"
+ )
+
+ # Default processor
+ processor = AutoProcessor.from_pretrained("thesby/Qwen2.5-VL-7B-NSFW-Caption-V4")
+
+ # The default range for the number of visual tokens per image is 4-16384.
+ # You can set min_pixels and max_pixels to balance quality and cost,
+ # e.g. to respect the 800*800 fine-tuning limit mentioned above:
+ # min_pixels = 256 * 28 * 28
+ # max_pixels = 800 * 800
+ # processor = AutoProcessor.from_pretrained(
+ #     "thesby/Qwen2.5-VL-7B-NSFW-Caption-V4", min_pixels=min_pixels, max_pixels=max_pixels
+ # )
+
+ messages = [
+     {
+         "role": "user",
+         "content": [
+             {
+                 "type": "image",
+                 "image": "https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-VL/assets/demo.jpeg",
+             },
+             # The prompt asks (in Chinese) for a comprehensive, fluent, highly detailed
+             # description covering every visible element, attribute, and spatial relation.
+             {"type": "text", "text": "请用自然流畅的中文对以下图片进行全面而详细的描述。包括所有可见元素及其属性(如颜色、大小、形状、质地),它们的空间关系,以及任何显著特征或上下文。确保用自然流畅的中文描述清晰、生动,能够捕捉图片的每一个方面,不遗漏任何重要细节。"},
+         ],
+     }
+ ]
+
+ # Preparation for inference
+ text = processor.apply_chat_template(
+     messages, tokenize=False, add_generation_prompt=True
+ )
+ image_inputs, video_inputs = process_vision_info(messages)
+ inputs = processor(
+     text=[text],
+     images=image_inputs,
+     videos=video_inputs,
+     padding=True,
+     return_tensors="pt",
+ )
+ inputs = inputs.to("cuda")
+
+ # Inference: generate the caption
+ generated_ids = model.generate(**inputs, max_new_tokens=128)
+ generated_ids_trimmed = [
+     out_ids[len(in_ids):] for in_ids, out_ids in zip(inputs.input_ids, generated_ids)
+ ]
+ output_text = processor.batch_decode(
+     generated_ids_trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False
+ )
+ print(output_text)
+ ```
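+ Note that `max_new_tokens=128` in the snippet above will cut off the long-form captions described earlier; raising the budget is a one-line change (illustrative value):
+
+ ```python
+ # Long captions can run to several hundred tokens; give generation more room.
+ generated_ids = model.generate(**inputs, max_new_tokens=1024)
+ ```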
+
+ ## Uses and Limitations
+
+ ### Intended Uses
+ * **Automated content annotation**: generate high-quality descriptions and tags for large image collections, for content management, retrieval, and recommendation systems.
+ * **Accessibility**: describe image content for visually impaired users, helping them understand visual information.
+ * **Creative content generation**: provide image-grounded text as inspiration for art, storytelling, advertising copy, and similar work.
+ * **Digital content analysis**: automatically analyze and archive diverse image content, including both SFW and NSFW material.
+
+ ### Out-of-Scope
+ * The model must not be used to generate content that is harmful, illegal, discriminatory, or violates others' privacy.
+ * Its output should not be treated as absolute fact, and it must not drive critical decisions that demand high accuracy and reliability (e.g., medical diagnosis or legal judgments); all important applications require human review.
+ * When applying the model to NSFW content, users must strictly comply with local laws and regulations and bear the corresponding responsibility.
+
+ ### Limitations and Bias
+ * **Hallucination**: like all large models, this one can "hallucinate", i.e., describe details that are not present in the image.
+ * **Data bias**: outputs may reflect social and cultural biases present in the training data (e.g., stereotypes around gender, ethnicity, or age).
+ * **SFW/NSFW boundary**: for ambiguous images near the SFW/NSFW boundary, the model's judgment and description may not match human expectations.
+
+ ## Training Details
+
+ ### Training Data
+ The model was fine-tuned on a carefully built dataset of roughly **2 million** image-text pairs. The data was strictly filtered and cleaned, and mixes:
+ * high-quality public image-text datasets;
+ * a privately collected and annotated dataset covering a wide range of SFW and NSFW scenes.
+
+ The data distribution is designed to strengthen detail capture and long-form generation while preserving generalization across content types.
+
+ ### Training Procedure
+ * **Base model**: `Qwen/Qwen2.5-VL-7B-Instruct`
+ * **Fine-tuning strategy**: LoRA fine-tuning
+ * **Framework**: unsloth
+ * **Main hyperparameters** (a generic adapter setup is sketched after this list):
+     * Learning Rate: 2e-5
+     * Batch Size: 16
+     * Epochs: 1
+     * Optimizer: AdamW
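+ The card does not publish the adapter rank or target modules. Purely as an illustration of the LoRA setup named above, a generic peft configuration might look like this (every value below is an assumption, not the authors' setting):
+
+ ```python
+ from peft import LoraConfig
+
+ # Hypothetical adapter config; r, alpha, dropout, and target_modules are guesses.
+ lora_config = LoraConfig(
+     r=16,
+     lora_alpha=32,
+     lora_dropout=0.05,
+     target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
+     task_type="CAUSAL_LM",
+ )
+ ```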
+
+ ### Plans
+ * Improve **English** caption quality
+ * Add **short-video** captioning
text_encoder/added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
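This file pins ids for the control and vision tokens that the chat template below emits, e.g. <|image_pad|> (151655) bracketed by <|vision_start|>/<|vision_end|> (151652/151653). A quick mapping check using the standard tokenizers API (illustrative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen-Image", subfolder="text_encoder")
# Should print 151655, matching added_tokens.json above
print(tok.convert_tokens_to_ids("<|image_pad|>"))
```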
text_encoder/chat_template.jinja ADDED
@@ -0,0 +1,7 @@
+ {% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system
+ You are MiMo, an AI assistant developed by Xiaomi.<|im_end|>
+ {% endif %}<|im_start|>{{ message['role'] }}
+ {% if message['content'] is string %}{{ message['content'] }}<|im_end|>
+ {% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>
+ {% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant
+ {% endif %}
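The template hard-codes a "You are MiMo, an AI assistant developed by Xiaomi." system prompt when none is supplied, consistent with the MiMo-VL entry in the text encoder's base_model list, and wraps each image in <|vision_start|><|image_pad|><|vision_end|>. A minimal rendering check, assuming a transformers version that picks up chat_template.jinja:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen-Image", subfolder="text_encoder")
messages = [{"role": "user", "content": [{"type": "image"}, {"type": "text", "text": "Describe this image."}]}]
# Render the template above to a prompt string without tokenizing
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
```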
text_encoder/config.json ADDED
@@ -0,0 +1,147 @@
+ {
+   "architectures": [
+     "Qwen2_5_VLForConditionalGeneration"
+   ],
+   "attention_bias": true,
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "image_token_id": 151655,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 128000,
+   "max_window_layers": 32,
+   "model_type": "qwen2_5_vl",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 36,
+   "num_key_value_heads": 8,
+   "pad_token_id": 151643,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 640000.0,
+   "sliding_window": 8192,
+   "text_config": {
+     "architectures": [
+       "Qwen2_5_VLForConditionalGeneration"
+     ],
+     "attention_bias": true,
+     "attention_dropout": 0.0,
+     "bos_token_id": 151643,
+     "eos_token_id": 151645,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "image_token_id": null,
+     "initializer_range": 0.02,
+     "intermediate_size": 11008,
+     "layer_types": [
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention"
+     ],
+     "max_position_embeddings": 128000,
+     "max_window_layers": 32,
+     "model_type": "qwen2_5_vl_text",
+     "num_attention_heads": 32,
+     "num_hidden_layers": 36,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+       "mrope_section": [
+         16,
+         24,
+         24
+       ],
+       "rope_type": "default",
+       "type": "default"
+     },
+     "rope_theta": 640000.0,
+     "sliding_window": null,
+     "torch_dtype": "bfloat16",
+     "use_cache": true,
+     "use_sliding_window": false,
+     "video_token_id": null,
+     "vision_end_token_id": 151653,
+     "vision_start_token_id": 151652,
+     "vision_token_id": 151654,
+     "vocab_size": 151680
+   },
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.4",
+   "unsloth_version": "2025.9.6",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "video_token_id": 151656,
+   "vision_config": {
+     "depth": 32,
+     "fullatt_block_indexes": [
+       7,
+       15,
+       23,
+       31
+     ],
+     "hidden_act": "silu",
+     "hidden_size": 1280,
+     "in_channels": 3,
+     "in_chans": 3,
+     "initializer_range": 0.02,
+     "intermediate_size": 3456,
+     "model_type": "qwen2_5_vl",
+     "num_heads": 16,
+     "out_hidden_size": 4096,
+     "patch_size": 14,
+     "spatial_merge_size": 2,
+     "spatial_patch_size": 14,
+     "temporal_patch_size": 2,
+     "tokens_per_second": 2,
+     "torch_dtype": "bfloat16",
+     "window_size": 112
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 151680
+ }
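A few derived numbers worth noting from this config: hidden_size 4096 over 32 attention heads gives a head dimension of 128, and num_key_value_heads 8 means grouped-query attention with 4 query heads sharing each KV head; layer_types marks all 36 layers full_attention, matching use_sliding_window being false. As a quick check of that arithmetic:

```python
hidden_size, n_heads, n_kv_heads, n_layers = 4096, 32, 8, 36

head_dim = hidden_size // n_heads    # 128
gqa_group = n_heads // n_kv_heads    # 4 query heads per KV head
assert head_dim == 128 and gqa_group == 4
```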
text_encoder/generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "repetition_penalty": 1.05,
+   "temperature": 0.1,
+   "top_k": 1,
+   "top_p": 0.001,
+   "transformers_version": "4.53.1"
+ }
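With top_k 1 (plus temperature 0.1 and top_p 0.001), decoding is effectively greedy even though do_sample is true, so caption output is near-deterministic. These are only defaults and can be overridden per call; a sketch reusing model and inputs from the usage snippet in text_encoder/README.md above (values illustrative):

```python
# Hypothetical override: re-enable meaningful sampling for more varied captions
outputs = model.generate(**inputs, do_sample=True, temperature=0.7, top_p=0.9, top_k=50)
```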
text_encoder/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
text_encoder/model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76e17a576844c0ff087b9b7f5f6fd0855f5669b38dd94de7dbf23010627d0e24
+ size 4612695408
text_encoder/model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ee5d4d87849a50e08a87c83d5da88034e4fc9a3d39ec41dd4f88b172d5325f4
+ size 4937303136
text_encoder/model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faea51412eb2e565b82fac20b4a7891c29333ad82fc334b1e0d47e07feadd497
+ size 4982109888
text_encoder/model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e32dba688a138d2320b4e9c3f540ac34af0910c2561debdb42483d4289049928
+ size 2080418376
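These four LFS pointers reference roughly 16.6 GB of bfloat16 shards; the index file that follows maps every tensor name to its shard so loaders open only the files they need. A minimal lookup sketch over a local download, using only the standard library (illustrative):

```python
import json

# Path assumes the repo has been downloaded locally
with open("text_encoder/model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # 16612434432 bytes of tensors
print(index["weight_map"]["lm_head.weight"])  # which shard holds this tensor
```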
text_encoder/model.safetensors.index.json ADDED
@@ -0,0 +1,832 @@
+ {
+   "metadata": {
+     "total_size": 16612434432
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00002-of-00004.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.0.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.0.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.1.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.10.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.11.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.12.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.12.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.12.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.13.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.14.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.15.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.16.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.16.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.17.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.17.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.18.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.18.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.2.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.2.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.21.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.22.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.22.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.23.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.23.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.24.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.24.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.25.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.25.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.26.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.26.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.27.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
262
+ "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
263
+ "model.layers.28.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
264
+ "model.layers.28.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.28.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
266
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
267
+ "model.layers.28.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.28.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
269
+ "model.layers.28.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
270
+ "model.layers.28.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
271
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
272
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
273
+ "model.layers.29.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
274
+ "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
275
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
276
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
277
+ "model.layers.29.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
278
+ "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
279
+ "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
280
+ "model.layers.29.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
281
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
282
+ "model.layers.29.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
283
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
284
+ "model.layers.3.input_layernorm.weight": "model-00002-of-00004.safetensors",
285
+ "model.layers.3.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
286
+ "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
287
+ "model.layers.3.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
288
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
289
+ "model.layers.3.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
290
+ "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
291
+ "model.layers.3.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
292
+ "model.layers.3.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
293
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
294
+ "model.layers.3.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
295
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
296
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
297
+ "model.layers.30.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
298
+ "model.layers.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
299
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
300
+ "model.layers.30.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
301
+ "model.layers.30.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
302
+ "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
303
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
304
+ "model.layers.30.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
305
+ "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
306
+ "model.layers.30.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
307
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
308
+ "model.layers.31.input_layernorm.weight": "model-00002-of-00004.safetensors",
309
+ "model.layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
310
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
311
+ "model.layers.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
312
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
313
+ "model.layers.31.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
314
+ "model.layers.31.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
315
+ "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
316
+ "model.layers.31.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
317
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
318
+ "model.layers.31.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
319
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
320
+ "model.layers.32.input_layernorm.weight": "model-00001-of-00004.safetensors",
321
+ "model.layers.32.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
322
+ "model.layers.32.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
323
+ "model.layers.32.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.layers.32.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
325
+ "model.layers.32.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
326
+ "model.layers.32.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
327
+ "model.layers.32.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
328
+ "model.layers.32.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
329
+ "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
330
+ "model.layers.32.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
331
+ "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
332
+ "model.layers.33.input_layernorm.weight": "model-00002-of-00004.safetensors",
333
+ "model.layers.33.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
334
+ "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
335
+ "model.layers.33.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
336
+ "model.layers.33.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
337
+ "model.layers.33.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
338
+ "model.layers.33.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
339
+ "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
340
+ "model.layers.33.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
341
+ "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
342
+ "model.layers.33.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
343
+ "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
344
+ "model.layers.34.input_layernorm.weight": "model-00003-of-00004.safetensors",
345
+ "model.layers.34.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
346
+ "model.layers.34.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
347
+ "model.layers.34.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
348
+ "model.layers.34.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
349
+ "model.layers.34.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
350
+ "model.layers.34.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
351
+ "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
352
+ "model.layers.34.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
353
+ "model.layers.34.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
354
+ "model.layers.34.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
355
+ "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
356
+ "model.layers.35.input_layernorm.weight": "model-00001-of-00004.safetensors",
357
+ "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
358
+ "model.layers.35.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
359
+ "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
360
+ "model.layers.35.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
361
+ "model.layers.35.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
362
+ "model.layers.35.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
363
+ "model.layers.35.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
364
+ "model.layers.35.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
365
+ "model.layers.35.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
366
+ "model.layers.35.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
367
+ "model.layers.35.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
368
+ "model.layers.4.input_layernorm.weight": "model-00003-of-00004.safetensors",
369
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
370
+ "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
371
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
372
+ "model.layers.4.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
373
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
374
+ "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
375
+ "model.layers.4.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
376
+ "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
377
+ "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
378
+ "model.layers.4.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
379
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
380
+ "model.layers.5.input_layernorm.weight": "model-00004-of-00004.safetensors",
381
+ "model.layers.5.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
382
+ "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
383
+ "model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
384
+ "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
385
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
386
+ "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
387
+ "model.layers.5.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
388
+ "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
389
+ "model.layers.5.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
390
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
391
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
392
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
393
+ "model.layers.6.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
394
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
395
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
396
+ "model.layers.6.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
397
+ "model.layers.6.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
398
+ "model.layers.6.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
399
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
400
+ "model.layers.6.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
401
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
402
+ "model.layers.6.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
403
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
404
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
405
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
406
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
407
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
408
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
409
+ "model.layers.7.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
410
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
411
+ "model.layers.7.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
412
+ "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
413
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
414
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
415
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
416
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
417
+ "model.layers.8.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
418
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
419
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
420
+ "model.layers.8.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
421
+ "model.layers.8.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
422
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
423
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
424
+ "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
425
+ "model.layers.8.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
426
+ "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
427
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
428
+ "model.layers.9.input_layernorm.weight": "model-00004-of-00004.safetensors",
429
+ "model.layers.9.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
430
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
431
+ "model.layers.9.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
432
+ "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
433
+ "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
434
+ "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
435
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
436
+ "model.layers.9.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
437
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
438
+ "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
439
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
440
+ "model.norm.weight": "model-00001-of-00004.safetensors",
441
+ "visual.blocks.0.attn.proj.bias": "model-00002-of-00004.safetensors",
442
+ "visual.blocks.0.attn.proj.weight": "model-00002-of-00004.safetensors",
443
+ "visual.blocks.0.attn.qkv.bias": "model-00003-of-00004.safetensors",
444
+ "visual.blocks.0.attn.qkv.weight": "model-00003-of-00004.safetensors",
445
+ "visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
446
+ "visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
447
+ "visual.blocks.0.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
448
+ "visual.blocks.0.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
449
+ "visual.blocks.0.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
450
+ "visual.blocks.0.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
451
+ "visual.blocks.0.norm1.weight": "model-00001-of-00004.safetensors",
452
+ "visual.blocks.0.norm2.weight": "model-00002-of-00004.safetensors",
453
+ "visual.blocks.1.attn.proj.bias": "model-00004-of-00004.safetensors",
454
+ "visual.blocks.1.attn.proj.weight": "model-00003-of-00004.safetensors",
455
+ "visual.blocks.1.attn.qkv.bias": "model-00002-of-00004.safetensors",
456
+ "visual.blocks.1.attn.qkv.weight": "model-00002-of-00004.safetensors",
457
+ "visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
458
+ "visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
459
+ "visual.blocks.1.mlp.gate_proj.bias": "model-00004-of-00004.safetensors",
460
+ "visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
461
+ "visual.blocks.1.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
462
+ "visual.blocks.1.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
463
+ "visual.blocks.1.norm1.weight": "model-00004-of-00004.safetensors",
464
+ "visual.blocks.1.norm2.weight": "model-00001-of-00004.safetensors",
465
+ "visual.blocks.10.attn.proj.bias": "model-00001-of-00004.safetensors",
466
+ "visual.blocks.10.attn.proj.weight": "model-00004-of-00004.safetensors",
467
+ "visual.blocks.10.attn.qkv.bias": "model-00001-of-00004.safetensors",
468
+ "visual.blocks.10.attn.qkv.weight": "model-00003-of-00004.safetensors",
469
+ "visual.blocks.10.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
470
+ "visual.blocks.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
471
+ "visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
472
+ "visual.blocks.10.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
473
+ "visual.blocks.10.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
474
+ "visual.blocks.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
475
+ "visual.blocks.10.norm1.weight": "model-00001-of-00004.safetensors",
476
+ "visual.blocks.10.norm2.weight": "model-00001-of-00004.safetensors",
477
+ "visual.blocks.11.attn.proj.bias": "model-00003-of-00004.safetensors",
478
+ "visual.blocks.11.attn.proj.weight": "model-00004-of-00004.safetensors",
479
+ "visual.blocks.11.attn.qkv.bias": "model-00003-of-00004.safetensors",
480
+ "visual.blocks.11.attn.qkv.weight": "model-00001-of-00004.safetensors",
481
+ "visual.blocks.11.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
482
+ "visual.blocks.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
483
+ "visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
484
+ "visual.blocks.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
485
+ "visual.blocks.11.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
486
+ "visual.blocks.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
487
+ "visual.blocks.11.norm1.weight": "model-00003-of-00004.safetensors",
488
+ "visual.blocks.11.norm2.weight": "model-00003-of-00004.safetensors",
489
+ "visual.blocks.12.attn.proj.bias": "model-00004-of-00004.safetensors",
490
+ "visual.blocks.12.attn.proj.weight": "model-00003-of-00004.safetensors",
491
+ "visual.blocks.12.attn.qkv.bias": "model-00002-of-00004.safetensors",
492
+ "visual.blocks.12.attn.qkv.weight": "model-00003-of-00004.safetensors",
493
+ "visual.blocks.12.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
494
+ "visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
495
+ "visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
496
+ "visual.blocks.12.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
497
+ "visual.blocks.12.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
498
+ "visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
499
+ "visual.blocks.12.norm1.weight": "model-00004-of-00004.safetensors",
500
+ "visual.blocks.12.norm2.weight": "model-00003-of-00004.safetensors",
501
+ "visual.blocks.13.attn.proj.bias": "model-00002-of-00004.safetensors",
502
+ "visual.blocks.13.attn.proj.weight": "model-00002-of-00004.safetensors",
503
+ "visual.blocks.13.attn.qkv.bias": "model-00003-of-00004.safetensors",
504
+ "visual.blocks.13.attn.qkv.weight": "model-00002-of-00004.safetensors",
505
+ "visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
506
+ "visual.blocks.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
507
+ "visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
508
+ "visual.blocks.13.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
509
+ "visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
510
+ "visual.blocks.13.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
511
+ "visual.blocks.13.norm1.weight": "model-00004-of-00004.safetensors",
512
+ "visual.blocks.13.norm2.weight": "model-00003-of-00004.safetensors",
513
+ "visual.blocks.14.attn.proj.bias": "model-00003-of-00004.safetensors",
514
+ "visual.blocks.14.attn.proj.weight": "model-00002-of-00004.safetensors",
515
+ "visual.blocks.14.attn.qkv.bias": "model-00003-of-00004.safetensors",
516
+ "visual.blocks.14.attn.qkv.weight": "model-00003-of-00004.safetensors",
517
+ "visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
518
+ "visual.blocks.14.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
519
+ "visual.blocks.14.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
520
+ "visual.blocks.14.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
521
+ "visual.blocks.14.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
522
+ "visual.blocks.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
523
+ "visual.blocks.14.norm1.weight": "model-00001-of-00004.safetensors",
524
+ "visual.blocks.14.norm2.weight": "model-00002-of-00004.safetensors",
525
+ "visual.blocks.15.attn.proj.bias": "model-00003-of-00004.safetensors",
526
+ "visual.blocks.15.attn.proj.weight": "model-00004-of-00004.safetensors",
527
+ "visual.blocks.15.attn.qkv.bias": "model-00003-of-00004.safetensors",
528
+ "visual.blocks.15.attn.qkv.weight": "model-00002-of-00004.safetensors",
529
+ "visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
530
+ "visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
531
+ "visual.blocks.15.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
532
+ "visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
533
+ "visual.blocks.15.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
534
+ "visual.blocks.15.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
535
+ "visual.blocks.15.norm1.weight": "model-00002-of-00004.safetensors",
536
+ "visual.blocks.15.norm2.weight": "model-00003-of-00004.safetensors",
537
+ "visual.blocks.16.attn.proj.bias": "model-00003-of-00004.safetensors",
538
+ "visual.blocks.16.attn.proj.weight": "model-00002-of-00004.safetensors",
539
+ "visual.blocks.16.attn.qkv.bias": "model-00001-of-00004.safetensors",
540
+ "visual.blocks.16.attn.qkv.weight": "model-00002-of-00004.safetensors",
541
+ "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
542
+ "visual.blocks.16.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
543
+ "visual.blocks.16.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
544
+ "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
545
+ "visual.blocks.16.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
546
+ "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
547
+ "visual.blocks.16.norm1.weight": "model-00003-of-00004.safetensors",
548
+ "visual.blocks.16.norm2.weight": "model-00001-of-00004.safetensors",
549
+ "visual.blocks.17.attn.proj.bias": "model-00001-of-00004.safetensors",
550
+ "visual.blocks.17.attn.proj.weight": "model-00002-of-00004.safetensors",
551
+ "visual.blocks.17.attn.qkv.bias": "model-00003-of-00004.safetensors",
552
+ "visual.blocks.17.attn.qkv.weight": "model-00002-of-00004.safetensors",
553
+ "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
554
+ "visual.blocks.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
555
+ "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
556
+ "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
557
+ "visual.blocks.17.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
558
+ "visual.blocks.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
559
+ "visual.blocks.17.norm1.weight": "model-00003-of-00004.safetensors",
560
+ "visual.blocks.17.norm2.weight": "model-00001-of-00004.safetensors",
561
+ "visual.blocks.18.attn.proj.bias": "model-00001-of-00004.safetensors",
562
+ "visual.blocks.18.attn.proj.weight": "model-00001-of-00004.safetensors",
563
+ "visual.blocks.18.attn.qkv.bias": "model-00003-of-00004.safetensors",
564
+ "visual.blocks.18.attn.qkv.weight": "model-00001-of-00004.safetensors",
565
+ "visual.blocks.18.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
566
+ "visual.blocks.18.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
567
+ "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
568
+ "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
569
+ "visual.blocks.18.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
570
+ "visual.blocks.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
571
+ "visual.blocks.18.norm1.weight": "model-00003-of-00004.safetensors",
572
+ "visual.blocks.18.norm2.weight": "model-00001-of-00004.safetensors",
573
+ "visual.blocks.19.attn.proj.bias": "model-00003-of-00004.safetensors",
574
+ "visual.blocks.19.attn.proj.weight": "model-00002-of-00004.safetensors",
575
+ "visual.blocks.19.attn.qkv.bias": "model-00002-of-00004.safetensors",
576
+ "visual.blocks.19.attn.qkv.weight": "model-00001-of-00004.safetensors",
577
+ "visual.blocks.19.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
578
+ "visual.blocks.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
579
+ "visual.blocks.19.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
580
+ "visual.blocks.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
581
+ "visual.blocks.19.mlp.up_proj.bias": "model-00004-of-00004.safetensors",
582
+ "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
583
+ "visual.blocks.19.norm1.weight": "model-00001-of-00004.safetensors",
584
+ "visual.blocks.19.norm2.weight": "model-00003-of-00004.safetensors",
585
+ "visual.blocks.2.attn.proj.bias": "model-00003-of-00004.safetensors",
586
+ "visual.blocks.2.attn.proj.weight": "model-00003-of-00004.safetensors",
587
+ "visual.blocks.2.attn.qkv.bias": "model-00003-of-00004.safetensors",
588
+ "visual.blocks.2.attn.qkv.weight": "model-00001-of-00004.safetensors",
589
+ "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
590
+ "visual.blocks.2.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
591
+ "visual.blocks.2.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
592
+ "visual.blocks.2.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
593
+ "visual.blocks.2.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
594
+ "visual.blocks.2.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
595
+ "visual.blocks.2.norm1.weight": "model-00001-of-00004.safetensors",
596
+ "visual.blocks.2.norm2.weight": "model-00001-of-00004.safetensors",
597
+ "visual.blocks.20.attn.proj.bias": "model-00004-of-00004.safetensors",
598
+ "visual.blocks.20.attn.proj.weight": "model-00002-of-00004.safetensors",
599
+ "visual.blocks.20.attn.qkv.bias": "model-00001-of-00004.safetensors",
600
+ "visual.blocks.20.attn.qkv.weight": "model-00001-of-00004.safetensors",
601
+ "visual.blocks.20.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
602
+ "visual.blocks.20.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
603
+ "visual.blocks.20.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
604
+ "visual.blocks.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
605
+ "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
606
+ "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
607
+ "visual.blocks.20.norm1.weight": "model-00002-of-00004.safetensors",
608
+ "visual.blocks.20.norm2.weight": "model-00004-of-00004.safetensors",
609
+ "visual.blocks.21.attn.proj.bias": "model-00002-of-00004.safetensors",
610
+ "visual.blocks.21.attn.proj.weight": "model-00001-of-00004.safetensors",
611
+ "visual.blocks.21.attn.qkv.bias": "model-00003-of-00004.safetensors",
612
+ "visual.blocks.21.attn.qkv.weight": "model-00001-of-00004.safetensors",
613
+ "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
614
+ "visual.blocks.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
615
+ "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
616
+ "visual.blocks.21.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
617
+ "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
618
+ "visual.blocks.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
619
+ "visual.blocks.21.norm1.weight": "model-00002-of-00004.safetensors",
620
+ "visual.blocks.21.norm2.weight": "model-00003-of-00004.safetensors",
621
+ "visual.blocks.22.attn.proj.bias": "model-00004-of-00004.safetensors",
622
+ "visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
623
+ "visual.blocks.22.attn.qkv.bias": "model-00002-of-00004.safetensors",
624
+ "visual.blocks.22.attn.qkv.weight": "model-00003-of-00004.safetensors",
625
+ "visual.blocks.22.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
626
+ "visual.blocks.22.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
627
+ "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
628
+ "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
629
+ "visual.blocks.22.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
630
+ "visual.blocks.22.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
631
+ "visual.blocks.22.norm1.weight": "model-00003-of-00004.safetensors",
632
+ "visual.blocks.22.norm2.weight": "model-00003-of-00004.safetensors",
633
+ "visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
634
+ "visual.blocks.23.attn.proj.weight": "model-00002-of-00004.safetensors",
635
+ "visual.blocks.23.attn.qkv.bias": "model-00004-of-00004.safetensors",
636
+ "visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
637
+ "visual.blocks.23.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
638
+ "visual.blocks.23.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
639
+ "visual.blocks.23.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
640
+ "visual.blocks.23.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
641
+ "visual.blocks.23.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
642
+ "visual.blocks.23.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
643
+ "visual.blocks.23.norm1.weight": "model-00004-of-00004.safetensors",
644
+ "visual.blocks.23.norm2.weight": "model-00003-of-00004.safetensors",
645
+ "visual.blocks.24.attn.proj.bias": "model-00002-of-00004.safetensors",
646
+ "visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
647
+ "visual.blocks.24.attn.qkv.bias": "model-00004-of-00004.safetensors",
648
+ "visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
649
+ "visual.blocks.24.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
650
+ "visual.blocks.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
651
+ "visual.blocks.24.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
652
+ "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
653
+ "visual.blocks.24.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
654
+ "visual.blocks.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
655
+ "visual.blocks.24.norm1.weight": "model-00003-of-00004.safetensors",
656
+ "visual.blocks.24.norm2.weight": "model-00004-of-00004.safetensors",
657
+ "visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
658
+ "visual.blocks.25.attn.proj.weight": "model-00004-of-00004.safetensors",
659
+ "visual.blocks.25.attn.qkv.bias": "model-00003-of-00004.safetensors",
660
+ "visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
661
+ "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
662
+ "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
663
+ "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
664
+ "visual.blocks.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
665
+ "visual.blocks.25.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
666
+ "visual.blocks.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
667
+ "visual.blocks.25.norm1.weight": "model-00002-of-00004.safetensors",
668
+ "visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
669
+ "visual.blocks.26.attn.proj.bias": "model-00002-of-00004.safetensors",
670
+ "visual.blocks.26.attn.proj.weight": "model-00003-of-00004.safetensors",
671
+ "visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
672
+ "visual.blocks.26.attn.qkv.weight": "model-00002-of-00004.safetensors",
673
+ "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
674
+ "visual.blocks.26.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
675
+ "visual.blocks.26.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
676
+ "visual.blocks.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
677
+ "visual.blocks.26.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
678
+ "visual.blocks.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
679
+ "visual.blocks.26.norm1.weight": "model-00004-of-00004.safetensors",
680
+ "visual.blocks.26.norm2.weight": "model-00003-of-00004.safetensors",
681
+ "visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
682
+ "visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
683
+ "visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
684
+ "visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
685
+ "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
686
+ "visual.blocks.27.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
687
+ "visual.blocks.27.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
688
+ "visual.blocks.27.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
689
+ "visual.blocks.27.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
690
+ "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
691
+ "visual.blocks.27.norm1.weight": "model-00002-of-00004.safetensors",
692
+ "visual.blocks.27.norm2.weight": "model-00002-of-00004.safetensors",
693
+ "visual.blocks.28.attn.proj.bias": "model-00002-of-00004.safetensors",
694
+ "visual.blocks.28.attn.proj.weight": "model-00004-of-00004.safetensors",
695
+ "visual.blocks.28.attn.qkv.bias": "model-00003-of-00004.safetensors",
696
+ "visual.blocks.28.attn.qkv.weight": "model-00004-of-00004.safetensors",
697
+ "visual.blocks.28.mlp.down_proj.bias": "model-00004-of-00004.safetensors",
698
+ "visual.blocks.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
699
+ "visual.blocks.28.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
700
+ "visual.blocks.28.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
701
+ "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
702
+ "visual.blocks.28.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
703
+ "visual.blocks.28.norm1.weight": "model-00002-of-00004.safetensors",
704
+ "visual.blocks.28.norm2.weight": "model-00003-of-00004.safetensors",
705
+ "visual.blocks.29.attn.proj.bias": "model-00004-of-00004.safetensors",
706
+ "visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
707
+ "visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
708
+ "visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
709
+ "visual.blocks.29.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
710
+ "visual.blocks.29.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
711
+ "visual.blocks.29.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
712
+ "visual.blocks.29.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
713
+ "visual.blocks.29.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
714
+ "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
715
+ "visual.blocks.29.norm1.weight": "model-00003-of-00004.safetensors",
716
+ "visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
717
+ "visual.blocks.3.attn.proj.bias": "model-00002-of-00004.safetensors",
718
+ "visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
719
+ "visual.blocks.3.attn.qkv.bias": "model-00003-of-00004.safetensors",
720
+ "visual.blocks.3.attn.qkv.weight": "model-00003-of-00004.safetensors",
721
+ "visual.blocks.3.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
722
+ "visual.blocks.3.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
723
+ "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
724
+ "visual.blocks.3.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
725
+ "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
726
+ "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
727
+ "visual.blocks.3.norm1.weight": "model-00003-of-00004.safetensors",
728
+ "visual.blocks.3.norm2.weight": "model-00002-of-00004.safetensors",
729
+ "visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
730
+ "visual.blocks.30.attn.proj.weight": "model-00003-of-00004.safetensors",
731
+ "visual.blocks.30.attn.qkv.bias": "model-00002-of-00004.safetensors",
732
+ "visual.blocks.30.attn.qkv.weight": "model-00004-of-00004.safetensors",
733
+ "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
734
+ "visual.blocks.30.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
735
+ "visual.blocks.30.mlp.gate_proj.bias": "model-00004-of-00004.safetensors",
736
+ "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
737
+ "visual.blocks.30.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
738
+ "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
739
+ "visual.blocks.30.norm1.weight": "model-00002-of-00004.safetensors",
740
+ "visual.blocks.30.norm2.weight": "model-00002-of-00004.safetensors",
741
+ "visual.blocks.31.attn.proj.bias": "model-00003-of-00004.safetensors",
742
+ "visual.blocks.31.attn.proj.weight": "model-00003-of-00004.safetensors",
743
+ "visual.blocks.31.attn.qkv.bias": "model-00004-of-00004.safetensors",
744
+ "visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
745
+ "visual.blocks.31.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
746
+ "visual.blocks.31.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
747
+ "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
748
+ "visual.blocks.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
749
+ "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
750
+ "visual.blocks.31.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
751
+ "visual.blocks.31.norm1.weight": "model-00002-of-00004.safetensors",
752
+ "visual.blocks.31.norm2.weight": "model-00002-of-00004.safetensors",
753
+ "visual.blocks.4.attn.proj.bias": "model-00003-of-00004.safetensors",
754
+ "visual.blocks.4.attn.proj.weight": "model-00004-of-00004.safetensors",
755
+ "visual.blocks.4.attn.qkv.bias": "model-00002-of-00004.safetensors",
756
+ "visual.blocks.4.attn.qkv.weight": "model-00003-of-00004.safetensors",
757
+ "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
758
+ "visual.blocks.4.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
759
+ "visual.blocks.4.mlp.gate_proj.bias": "model-00002-of-00004.safetensors",
760
+ "visual.blocks.4.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
761
+ "visual.blocks.4.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
762
+ "visual.blocks.4.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
763
+ "visual.blocks.4.norm1.weight": "model-00004-of-00004.safetensors",
764
+ "visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
765
+ "visual.blocks.5.attn.proj.bias": "model-00003-of-00004.safetensors",
766
+ "visual.blocks.5.attn.proj.weight": "model-00003-of-00004.safetensors",
767
+ "visual.blocks.5.attn.qkv.bias": "model-00003-of-00004.safetensors",
768
+ "visual.blocks.5.attn.qkv.weight": "model-00003-of-00004.safetensors",
769
+ "visual.blocks.5.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
770
+ "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
771
+ "visual.blocks.5.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
772
+ "visual.blocks.5.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
773
+ "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
774
+ "visual.blocks.5.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
775
+ "visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
776
+ "visual.blocks.5.norm2.weight": "model-00003-of-00004.safetensors",
777
+ "visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
778
+ "visual.blocks.6.attn.proj.weight": "model-00003-of-00004.safetensors",
779
+ "visual.blocks.6.attn.qkv.bias": "model-00002-of-00004.safetensors",
780
+ "visual.blocks.6.attn.qkv.weight": "model-00003-of-00004.safetensors",
781
+ "visual.blocks.6.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
782
+ "visual.blocks.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
783
+ "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
784
+ "visual.blocks.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
785
+ "visual.blocks.6.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
786
+ "visual.blocks.6.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
787
+ "visual.blocks.6.norm1.weight": "model-00002-of-00004.safetensors",
788
+ "visual.blocks.6.norm2.weight": "model-00004-of-00004.safetensors",
789
+ "visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
790
+ "visual.blocks.7.attn.proj.weight": "model-00003-of-00004.safetensors",
791
+ "visual.blocks.7.attn.qkv.bias": "model-00003-of-00004.safetensors",
792
+ "visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
793
+ "visual.blocks.7.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
794
+ "visual.blocks.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
795
+ "visual.blocks.7.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
796
+ "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
797
+ "visual.blocks.7.mlp.up_proj.bias": "model-00003-of-00004.safetensors",
798
+ "visual.blocks.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
799
+ "visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
800
+ "visual.blocks.7.norm2.weight": "model-00004-of-00004.safetensors",
801
+ "visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
802
+ "visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
803
+ "visual.blocks.8.attn.qkv.bias": "model-00004-of-00004.safetensors",
804
+ "visual.blocks.8.attn.qkv.weight": "model-00003-of-00004.safetensors",
805
+ "visual.blocks.8.mlp.down_proj.bias": "model-00003-of-00004.safetensors",
806
+ "visual.blocks.8.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
807
+ "visual.blocks.8.mlp.gate_proj.bias": "model-00003-of-00004.safetensors",
808
+ "visual.blocks.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
809
+ "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
810
+ "visual.blocks.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
811
+ "visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
812
+ "visual.blocks.8.norm2.weight": "model-00002-of-00004.safetensors",
813
+ "visual.blocks.9.attn.proj.bias": "model-00002-of-00004.safetensors",
814
+ "visual.blocks.9.attn.proj.weight": "model-00002-of-00004.safetensors",
815
+ "visual.blocks.9.attn.qkv.bias": "model-00004-of-00004.safetensors",
816
+ "visual.blocks.9.attn.qkv.weight": "model-00003-of-00004.safetensors",
817
+ "visual.blocks.9.mlp.down_proj.bias": "model-00002-of-00004.safetensors",
818
+ "visual.blocks.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
819
+ "visual.blocks.9.mlp.gate_proj.bias": "model-00004-of-00004.safetensors",
820
+ "visual.blocks.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
821
+ "visual.blocks.9.mlp.up_proj.bias": "model-00002-of-00004.safetensors",
822
+ "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
823
+ "visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
824
+ "visual.blocks.9.norm2.weight": "model-00003-of-00004.safetensors",
825
+ "visual.merger.ln_q.weight": "model-00004-of-00004.safetensors",
826
+ "visual.merger.mlp.0.bias": "model-00002-of-00004.safetensors",
827
+ "visual.merger.mlp.0.weight": "model-00003-of-00004.safetensors",
828
+ "visual.merger.mlp.2.bias": "model-00004-of-00004.safetensors",
829
+ "visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
830
+ "visual.patch_embed.proj.weight": "model-00004-of-00004.safetensors"
831
+ }
832
+ }
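The weight map that closes above is the standard safetensors sharding index: each parameter name is keyed to the shard file that stores it. As a minimal sketch of how a loader resolves one tensor from such an index (assuming the safetensors Python package and a local checkout; the layer name is just one key picked from the map above):

import json
from safetensors import safe_open  # pip install safetensors

# Look up which shard holds a given parameter, then read only that tensor.
with open("text_encoder/model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.21.input_layernorm.weight"
shard = index["weight_map"][name]  # e.g. "model-00003-of-00004.safetensors"
with safe_open(f"text_encoder/{shard}", framework="pt") as st:
    tensor = st.get_tensor(name)   # lazy: loads this tensor, not the whole shard
print(name, tuple(tensor.shape))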
text_encoder/preprocessor_config.json ADDED
@@ -0,0 +1,37 @@
+ {
+ "crop_size": null,
+ "data_format": "channels_first",
+ "default_to_square": true,
+ "device": null,
+ "disable_grouping": null,
+ "do_center_crop": null,
+ "do_convert_rgb": true,
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "image_processor_type": "Qwen2VLImageProcessorFast",
+ "image_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "input_data_format": null,
+ "max_pixels": 12845056,
+ "merge_size": 2,
+ "min_pixels": 3136,
+ "patch_size": 14,
+ "processor_class": "Qwen2_5_VLProcessor",
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "return_tensors": null,
+ "size": {
+ "longest_edge": 12845056,
+ "shortest_edge": 3136
+ },
+ "temporal_patch_size": 2
+ }
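With patch_size 14 and merge_size 2, the pixel bounds above mean the processor snaps each image to multiples of 28 px per side and keeps the total area within [min_pixels, max_pixels]. A rough re-statement of that rounding, modeled on the public Qwen2-VL image processor rather than code shipped in this repo:

import math

def smart_resize(h, w, factor=28, min_pixels=3136, max_pixels=12845056):
    # Round each side to a multiple of patch_size * merge_size, then rescale
    # the area into [min_pixels, max_pixels] while preserving aspect ratio.
    h_bar = max(factor, round(h / factor) * factor)
    w_bar = max(factor, round(w / factor) * factor)
    if h_bar * w_bar > max_pixels:
        beta = math.sqrt((h * w) / max_pixels)
        h_bar = math.floor(h / beta / factor) * factor
        w_bar = math.floor(w / beta / factor) * factor
    elif h_bar * w_bar < min_pixels:
        beta = math.sqrt(min_pixels / (h * w))
        h_bar = math.ceil(h * beta / factor) * factor
        w_bar = math.ceil(w * beta / factor) * factor
    return h_bar, w_bar

print(smart_resize(1080, 1920))  # both sides come back divisible by 28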
text_encoder/special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
text_encoder/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e9ff5e369d57eba6483d24fda0078f2f6f59f718cf93352ce17bd48c1dbbdc0
+ size 11421995
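tokenizer.json is checked in as a Git LFS pointer, so the three lines above are the entire committed file; the real ~11.4 MB blob is fetched separately. Verifying a downloaded copy against the pointer needs only the standard library (a sketch, assuming the blob sits at the path below):

import hashlib
import os

def verify_lfs(path, expected_oid, expected_size):
    # An LFS pointer records the blob's sha256 and byte size; check both.
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_lfs("text_encoder/tokenizer.json",
                 "0e9ff5e369d57eba6483d24fda0078f2f6f59f718cf93352ce17bd48c1dbbdc0",
                 11421995))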
text_encoder/tokenizer_config.json ADDED
@@ -0,0 +1,215 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "max_length": 5120,
+ "model_max_length": 131072,
+ "pad_to_multiple_of": null,
+ "pad_token": "<|endoftext|>",
+ "pad_token_type_id": 0,
+ "padding_side": "right",
+ "processor_class": "Qwen2_5_VLProcessor",
+ "split_special_tokens": false,
+ "stride": 0,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "truncation_side": "right",
+ "truncation_strategy": "longest_first",
+ "unk_token": null
+ }
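A quick sanity check that this config wires up as intended; a sketch assuming the transformers library and a local checkout of the repo root:

from transformers import AutoTokenizer  # pip install transformers

tok = AutoTokenizer.from_pretrained(".", subfolder="text_encoder")
print(tok.eos_token, tok.pad_token)             # <|im_end|> <|endoftext|>
print(tok.convert_tokens_to_ids("<|im_end|>"))  # 151645, per added_tokens_decoder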
text_encoder/video_preprocessor_config.json ADDED
@@ -0,0 +1,43 @@
+ {
+ "crop_size": null,
+ "data_format": "channels_first",
+ "default_to_square": true,
+ "device": null,
+ "do_center_crop": null,
+ "do_convert_rgb": true,
+ "do_normalize": true,
+ "do_pad": null,
+ "do_rescale": true,
+ "do_resize": true,
+ "do_sample_frames": false,
+ "fps": null,
+ "image_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "image_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "input_data_format": null,
+ "max_frames": 768,
+ "max_pixels": 12845056,
+ "merge_size": 2,
+ "min_frames": 4,
+ "min_pixels": 3136,
+ "num_frames": null,
+ "patch_size": 14,
+ "processor_class": "Qwen2_5_VLProcessor",
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "longest_edge": 12845056,
+ "shortest_edge": 3136
+ },
+ "size_divisor": null,
+ "temporal_patch_size": 2,
+ "video_metadata": null,
+ "video_processor_type": "Qwen2VLVideoProcessor"
+ }
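Here min_pixels = 3136 (56 squared) and max_pixels = 12845056 (3584 squared) bound the per-frame pixel budget, and spatial sizes are snapped to multiples of patch_size * merge_size = 28. A sketch of that resizing rule, reconstructed from the usual Qwen2-VL preprocessing (not code shipped in this repo):

import math

PATCH, MERGE = 14, 2
FACTOR = PATCH * MERGE          # 28: both sides must be divisible by this
MIN_PIXELS, MAX_PIXELS = 3136, 12845056

def smart_resize(h: int, w: int) -> tuple[int, int]:
    # Snap to multiples of FACTOR, then rescale into the pixel budget.
    h_bar = max(FACTOR, round(h / FACTOR) * FACTOR)
    w_bar = max(FACTOR, round(w / FACTOR) * FACTOR)
    if h_bar * w_bar > MAX_PIXELS:
        beta = math.sqrt((h * w) / MAX_PIXELS)
        h_bar = math.floor(h / beta / FACTOR) * FACTOR
        w_bar = math.floor(w / beta / FACTOR) * FACTOR
    elif h_bar * w_bar < MIN_PIXELS:
        beta = math.sqrt(MIN_PIXELS / (h * w))
        h_bar = math.ceil(h * beta / FACTOR) * FACTOR
        w_bar = math.ceil(w * beta / FACTOR) * FACTOR
    return h_bar, w_bar

print(smart_resize(1080, 1920))  # (1092, 1932), both divisible by 28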
text_encoder/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer/added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
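Because added_tokens.json pins these IDs, the mapping can be sanity-checked after loading. A short sketch, assuming a local checkout at ".":

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".", subfolder="tokenizer")

# Round-trip a few of the IDs fixed by the file above.
for token, expected in [("<|endoftext|>", 151643),
                        ("<|im_end|>", 151645),
                        ("<|vision_start|>", 151652)]:
    assert tok.convert_tokens_to_ids(token) == expected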
tokenizer/chat_template.jinja ADDED
@@ -0,0 +1,54 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- messages[0]['content'] }}
+ {%- else %}
+ {{- 'You are a helpful assistant.' }}
+ {%- endif %}
+ {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
+ {%- else %}
+ {{- '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
+ {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role }}
+ {%- if message.content %}
+ {{- '\n' + message.content }}
+ {%- endif %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '\n<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {{- tool_call.arguments | tojson }}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- message.content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
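Tracing the template by hand for the no-tools case: with no system message it emits the default system block, then one <|im_start|>role ... <|im_end|> block per message, and add_generation_prompt opens the assistant turn. A sketch (the checkout path "." is a placeholder):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".", subfolder="tokenizer")

messages = [{"role": "user", "content": "Describe this image."}]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Describe this image.<|im_end|>
# <|im_start|>assistant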
tokenizer/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,207 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
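This file is a slimmer copy of text_encoder/tokenizer_config.json: same Qwen2Tokenizer and added tokens, but without the processor-era fields (max_length, padding_side, and so on). A quick check of that claim, assuming both JSON files from this commit are on disk; the key list below is what the 215-vs-207 line counts suggest:

import json

with open("text_encoder/tokenizer_config.json") as f:
    enc = json.load(f)
with open("tokenizer/tokenizer_config.json") as f:
    tok = json.load(f)

# Keys present only in the text_encoder copy.
print(sorted(set(enc) - set(tok)))
# expected: ['max_length', 'pad_to_multiple_of', 'pad_token_type_id',
#            'padding_side', 'processor_class', 'stride',
#            'truncation_side', 'truncation_strategy']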
tokenizer/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
transformer/config.json ADDED
@@ -0,0 +1,18 @@
+ {
+ "_class_name": "QwenImageTransformer2DModel",
+ "_diffusers_version": "0.34.0.dev0",
+ "attention_head_dim": 128,
+ "axes_dims_rope": [
+ 16,
+ 56,
+ 56
+ ],
+ "guidance_embeds": false,
+ "in_channels": 64,
+ "joint_attention_dim": 3584,
+ "num_attention_heads": 24,
+ "num_layers": 60,
+ "out_channels": 16,
+ "patch_size": 2,
+ "pooled_projection_dim": 768
+ }
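The numbers are internally consistent: 24 heads at 128 dims per head give a 3072-wide attention stream, in_channels = 64 is the 16-channel latent packed into 2x2 patches, and joint_attention_dim = 3584 matches the hidden size of the Qwen2.5-VL-7B text encoder. A loading sketch, assuming a diffusers build that ships QwenImageTransformer2DModel (the config was written by 0.34.0.dev0) and a local checkout at ".":

import torch
from diffusers import QwenImageTransformer2DModel

transformer = QwenImageTransformer2DModel.from_pretrained(
    ".", subfolder="transformer", torch_dtype=torch.bfloat16
)

cfg = transformer.config
assert cfg.num_attention_heads * cfg.attention_head_dim == 3072
assert cfg.in_channels == cfg.out_channels * cfg.patch_size ** 2  # 16 * 2 * 2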
transformer/diffusion_pytorch_model-00001-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9f33a59093af3abcc2836d4cf4b7bd122c238ca70a26c70f34fdde64646b3bcd
+ size 4989364312
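Each .safetensors entry in this commit is a git-lfs pointer, not the weights themselves: three lines giving the spec version, the blob's sha256, and its byte size. A stdlib-only sketch for verifying a downloaded blob against such a pointer (file paths are placeholders):

import hashlib
from pathlib import Path

def read_pointer(path: str) -> dict:
    # Pointer files are simple "key value" lines, as shown above.
    return dict(line.split(" ", 1) for line in Path(path).read_text().splitlines())

def verify(blob_path: str, pointer: dict) -> bool:
    expected_hash = pointer["oid"].split(":", 1)[1]
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return (h.hexdigest() == expected_hash
            and Path(blob_path).stat().st_size == int(pointer["size"]))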
transformer/diffusion_pytorch_model-00002-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:49d580e85b932e15c226e68a9d5549629878d3a128da7662d28401956cee0238
+ size 4984214160
transformer/diffusion_pytorch_model-00003-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d72f0cbd0d6c2bdc2e1e73e41ac143452fc9e4570d1362203498832302de69f
+ size 4946470000
transformer/diffusion_pytorch_model-00004-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee87991bfa1bc8e7553034b07099151532524101ca1581c4f3a86292814e9f0d
+ size 4984213736
transformer/diffusion_pytorch_model-00005-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83291a65be26fe688db4cf81383749831b623a6fcb1a71c851bafb55ddc0058d
+ size 4946471896
transformer/diffusion_pytorch_model-00006-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61e936a995e7f4ce08d44b9d6ffb56dc65dd61200288876ee2fda72da322583d
+ size 4946451560
transformer/diffusion_pytorch_model-00007-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:854bc9e4a65c11c256f2fef31606a6407e9a425e8bde394db8be71416c515ead
+ size 4908690520
transformer/diffusion_pytorch_model-00008-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:caedc7cc2914ab113cfbb3684cf072350201182ae8de1a7308e419385987ae40
+ size 4984232856
transformer/diffusion_pytorch_model-00009-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae0fef149890f61d54572baa1e806de97156c7ec5b7bbbbbbf27a2d87f33af61
+ size 1170918840
transformer/diffusion_pytorch_model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
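The nine size fields above sum to 40,861,027,880 bytes (roughly 40.9 GB) of transformer weights. The index file maps every tensor name to the shard that stores it, so loaders can open the files lazily. A sketch, assuming the standard safetensors index layout (a metadata.total_size field plus a weight_map dict):

import json

with open("transformer/diffusion_pytorch_model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]            # tensor name -> shard filename
shards = sorted(set(weight_map.values()))   # should name all nine shards
print(len(shards), "shards,", len(weight_map), "tensors")
print("total_size:", index["metadata"]["total_size"], "bytes")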
 
vae/config.json ADDED
@@ -0,0 +1,56 @@
+ {
+ "_class_name": "AutoencoderKLQwenImage",
+ "_diffusers_version": "0.34.0.dev0",
+ "attn_scales": [],
+ "base_dim": 96,
+ "dim_mult": [
+ 1,
+ 2,
+ 4,
+ 4
+ ],
+ "dropout": 0.0,
+ "latents_mean": [
+ -0.7571,
+ -0.7089,
+ -0.9113,
+ 0.1075,
+ -0.1745,
+ 0.9653,
+ -0.1517,
+ 1.5508,
+ 0.4134,
+ -0.0715,
+ 0.5517,
+ -0.3632,
+ -0.1922,
+ -0.9497,
+ 0.2503,
+ -0.2921
+ ],
+ "latents_std": [
+ 2.8184,
+ 1.4541,
+ 2.3275,
+ 2.6558,
+ 1.2196,
+ 1.7708,
+ 2.6052,
+ 2.0743,
+ 3.2687,
+ 2.1526,
+ 2.8652,
+ 1.5579,
+ 1.6382,
+ 1.1253,
+ 2.8251,
+ 1.916
+ ],
+ "num_res_blocks": 2,
+ "temperal_downsample": [
+ false,
+ true,
+ true
+ ],
+ "z_dim": 16
+ }
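latents_mean and latents_std carry per-channel statistics for the 16 latent channels (z_dim), in the (B, C, T, H, W) layout this video-style VAE uses. A sketch of the channel-wise standardization that, by the usual diffusers convention for such VAEs, sits between the encoder and the transformer (a hedged reconstruction, not pipeline code from this repo):

import torch

LATENTS_MEAN = [-0.7571, -0.7089, -0.9113, 0.1075, -0.1745, 0.9653, -0.1517,
                1.5508, 0.4134, -0.0715, 0.5517, -0.3632, -0.1922, -0.9497,
                0.2503, -0.2921]
LATENTS_STD = [2.8184, 1.4541, 2.3275, 2.6558, 1.2196, 1.7708, 2.6052, 2.0743,
               3.2687, 2.1526, 2.8652, 1.5579, 1.6382, 1.1253, 2.8251, 1.916]

mean = torch.tensor(LATENTS_MEAN).view(1, 16, 1, 1, 1)
std = torch.tensor(LATENTS_STD).view(1, 16, 1, 1, 1)

def normalize(z):      # after vae.encode, before the transformer
    return (z - mean) / std

def denormalize(z):    # after the transformer, before vae.decode
    return z * std + mean

z = torch.randn(1, 16, 1, 64, 64)
assert torch.allclose(denormalize(normalize(z)), z, atol=1e-5)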
vae/diffusion_pytorch_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c8bc8b758c649abef9ea407b95408389a3b2f610d0d10fcb054fe171d0a8344
+ size 253806966
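With all components uploaded, model_index.json at the repo root lets the generic diffusers loader assemble the whole pipeline. A usage sketch, assuming a diffusers version that knows this pipeline class, a CUDA device with enough memory for the bf16 weights, and a local checkout at "."; the prompt and step count are illustrative:

import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(".", torch_dtype=torch.bfloat16)
pipe.to("cuda")

image = pipe(prompt="a coffee shop storefront at dusk",
             num_inference_steps=30).images[0]
image.save("out.png")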