Duplicate from Qwen/Qwen2.5-Coder-32B-Instruct
Co-authored-by: Yang Fan <clonefy@users.noreply.huggingface.co>
- .gitattributes +35 -0
- LICENSE +202 -0
- README.md +135 -0
- config.json +27 -0
- generation_config.json +14 -0
- merges.txt +0 -0
- model-00001-of-00014.safetensors +3 -0
- model-00002-of-00014.safetensors +3 -0
- model-00003-of-00014.safetensors +3 -0
- model-00004-of-00014.safetensors +3 -0
- model-00005-of-00014.safetensors +3 -0
- model-00006-of-00014.safetensors +3 -0
- model-00007-of-00014.safetensors +3 -0
- model-00008-of-00014.safetensors +3 -0
- model-00009-of-00014.safetensors +3 -0
- model-00010-of-00014.safetensors +3 -0
- model-00011-of-00014.safetensors +3 -0
- model-00012-of-00014.safetensors +3 -0
- model-00013-of-00014.safetensors +3 -0
- model-00014-of-00014.safetensors +3 -0
- model.safetensors.index.json +778 -0
- tokenizer.json +0 -0
- tokenizer_config.json +207 -0
- vocab.json +0 -0
    	
        .gitattributes
    ADDED
    
@@ -0,0 +1,35 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
    	
        LICENSE
    ADDED
    
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2024 Alibaba Cloud
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
    	
        README.md
    ADDED
    
@@ -0,0 +1,135 @@
+---
+license: apache-2.0
+license_link: https://huggingface.co/Qwen/Qwen2.5-Coder-32B-Instruct/blob/main/LICENSE
+language:
+- en
+base_model:
+- Qwen/Qwen2.5-Coder-32B
+pipeline_tag: text-generation
+library_name: transformers
+tags:
+- code
+- codeqwen
+- chat
+- qwen
+- qwen-coder
+---
+
+
+# Qwen2.5-Coder-32B-Instruct
+
+## Introduction
+
+Qwen2.5-Coder is the latest series of code-specific Qwen large language models (formerly known as CodeQwen). As of now, Qwen2.5-Coder covers six mainstream model sizes (0.5, 1.5, 3, 7, 14, and 32 billion parameters) to meet the needs of different developers. Qwen2.5-Coder brings the following improvements over CodeQwen1.5:
+
+- Significant improvements in **code generation**, **code reasoning**, and **code fixing**. Building on the strong Qwen2.5, we scaled the training data up to 5.5 trillion tokens, including source code, text-code grounding data, and synthetic data. Qwen2.5-Coder-32B is currently the state-of-the-art open-source code LLM, with coding abilities matching those of GPT-4o.
+- A more comprehensive foundation for real-world applications such as **Code Agents**: it not only enhances coding capabilities but also retains strengths in mathematics and general competencies.
+- **Long-context Support** up to 128K tokens.
+
+**This repo contains the instruction-tuned 32B Qwen2.5-Coder model**, which has the following features:
+- Type: Causal Language Models
+- Training Stage: Pretraining & Post-training
+- Architecture: transformers with RoPE, SwiGLU, RMSNorm, and Attention QKV bias
+- Number of Parameters: 32.5B
+- Number of Parameters (Non-Embedding): 31.0B
+- Number of Layers: 64
+- Number of Attention Heads (GQA): 40 for Q and 8 for KV
+- Context Length: Full 131,072 tokens
+  - Please refer to [this section](#processing-long-texts) for detailed instructions on how to deploy Qwen2.5 for handling long texts.
+
+For more details, please refer to our [blog](https://qwenlm.github.io/blog/qwen2.5-coder-family/), [GitHub](https://github.com/QwenLM/Qwen2.5-Coder), [Documentation](https://qwen.readthedocs.io/en/latest/), and [arXiv](https://arxiv.org/abs/2409.12186).
+
+## Requirements
+
+The code for Qwen2.5-Coder is included in the latest Hugging Face `transformers`, and we advise you to use the latest version of `transformers`.
+
+With `transformers<4.37.0`, you will encounter the following error:
+```
+KeyError: 'qwen2'
+```
+
+## Quickstart
+
+The following code snippet uses `apply_chat_template` to show you how to load the tokenizer and model and how to generate content.
+
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "Qwen/Qwen2.5-Coder-32B-Instruct"
+
+model = AutoModelForCausalLM.from_pretrained(
+    model_name,
+    torch_dtype="auto",
+    device_map="auto"
+)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+prompt = "write a quick sort algorithm."
+messages = [
+    {"role": "system", "content": "You are Qwen, created by Alibaba Cloud. You are a helpful assistant."},
+    {"role": "user", "content": prompt}
+]
+text = tokenizer.apply_chat_template(
+    messages,
+    tokenize=False,
+    add_generation_prompt=True
+)
+model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
+
+generated_ids = model.generate(
+    **model_inputs,
+    max_new_tokens=512
+)
+generated_ids = [
+    output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+]
+
+response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
+```
+
+### Processing Long Texts
+
+The current `config.json` is set for a context length of up to 32,768 tokens.
+To handle inputs exceeding 32,768 tokens, we utilize [YaRN](https://arxiv.org/abs/2309.00071), a technique for enhancing model length extrapolation, ensuring optimal performance on lengthy texts.
+
+For supported frameworks, you can add the following to `config.json` to enable YaRN:
+```json
+{
+  ...,
+  "rope_scaling": {
+    "factor": 4.0,
+    "original_max_position_embeddings": 32768,
+    "type": "yarn"
+  }
+}
+```
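A minimal sketch of applying the same setting at load time, without editing `config.json` on disk; this uses only standard `transformers` APIs, and the dict simply mirrors the JSON block above:

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Load the published config, then attach the YaRN rope_scaling block
# shown above before instantiating the model.
config = AutoConfig.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct")
config.rope_scaling = {
    "factor": 4.0,
    "original_max_position_embeddings": 32768,
    "type": "yarn",
}
model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen2.5-Coder-32B-Instruct",
    config=config,
    torch_dtype="auto",
    device_map="auto",
)
```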
+
+For deployment, we recommend using vLLM.
+Please refer to our [Documentation](https://qwen.readthedocs.io/en/latest/deployment/vllm.html) for usage if you are not familiar with vLLM.
+Presently, vLLM only supports static YaRN, which means the scaling factor remains constant regardless of input length, **potentially impacting performance on shorter texts**.
+We advise adding the `rope_scaling` configuration only when processing long contexts is required.
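A minimal offline-serving sketch, assuming vLLM is installed; the `tensor_parallel_size` value is illustrative, not a requirement:

```python
from vllm import LLM, SamplingParams

# Sampling values mirror generation_config.json in this repo.
params = SamplingParams(temperature=0.7, top_p=0.8,
                        repetition_penalty=1.05, max_tokens=512)

# tensor_parallel_size=4 is an assumption; size it to your GPUs.
# For the instruct model, apply the tokenizer's chat template to the
# prompt first (as in the Quickstart above) for best results.
llm = LLM(model="Qwen/Qwen2.5-Coder-32B-Instruct", tensor_parallel_size=4)
outputs = llm.generate(["write a quick sort algorithm."], params)
print(outputs[0].outputs[0].text)
```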
+
+## Evaluation & Performance
+
+Detailed evaluation results are reported in this [📑 blog](https://qwenlm.github.io/blog/qwen2.5-coder-family/).
+
+For requirements on GPU memory and the respective throughput, see the results [here](https://qwen.readthedocs.io/en/latest/benchmark/speed_benchmark.html).
+
+## Citation
+
+If you find our work helpful, feel free to cite us.
+
+```
+@article{hui2024qwen2,
+      title={Qwen2.5-Coder Technical Report},
+      author={Hui, Binyuan and Yang, Jian and Cui, Zeyu and Yang, Jiaxi and Liu, Dayiheng and Zhang, Lei and Liu, Tianyu and Zhang, Jiajun and Yu, Bowen and Dang, Kai and others},
+      journal={arXiv preprint arXiv:2409.12186},
+      year={2024}
+}
+@article{qwen2,
+      title={Qwen2 Technical Report},
+      author={An Yang and Baosong Yang and Binyuan Hui and Bo Zheng and Bowen Yu and Chang Zhou and Chengpeng Li and Chengyuan Li and Dayiheng Liu and Fei Huang and Guanting Dong and Haoran Wei and Huan Lin and Jialong Tang and Jialin Wang and Jian Yang and Jianhong Tu and Jianwei Zhang and Jianxin Ma and Jin Xu and Jingren Zhou and Jinze Bai and Jinzheng He and Junyang Lin and Kai Dang and Keming Lu and Keqin Chen and Kexin Yang and Mei Li and Mingfeng Xue and Na Ni and Pei Zhang and Peng Wang and Ru Peng and Rui Men and Ruize Gao and Runji Lin and Shijie Wang and Shuai Bai and Sinan Tan and Tianhang Zhu and Tianhao Li and Tianyu Liu and Wenbin Ge and Xiaodong Deng and Xiaohuan Zhou and Xingzhang Ren and Xinyu Zhang and Xipin Wei and Xuancheng Ren and Yang Fan and Yang Yao and Yichang Zhang and Yu Wan and Yunfei Chu and Yuqiong Liu and Zeyu Cui and Zhenru Zhang and Zhihao Fan},
+      journal={arXiv preprint arXiv:2407.10671},
+      year={2024}
+}
+```
    	
        config.json
    ADDED
    
@@ -0,0 +1,27 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 27648,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 70,
+  "model_type": "qwen2",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 64,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 1000000.0,
+  "sliding_window": 131072,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.43.1",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
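These values match the README's architecture summary (64 layers, GQA with 40 query and 8 KV heads). A minimal sketch for checking them programmatically, assuming only `transformers` and Hub access:

```python
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct")
head_dim = cfg.hidden_size // cfg.num_attention_heads            # 5120 // 40 = 128
gqa_group = cfg.num_attention_heads // cfg.num_key_value_heads   # 40 // 8 = 5 query heads per KV head
print(cfg.num_hidden_layers, head_dim, gqa_group)                # 64 128 5
```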
    	
        generation_config.json
    ADDED
    
@@ -0,0 +1,14 @@
+{
+  "bos_token_id": 151643,
+  "pad_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "repetition_penalty": 1.05,
+  "temperature": 0.7,
+  "top_p": 0.8,
+  "top_k": 20,
+  "transformers_version": "4.37.0"
+}
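These are the sampling defaults that `model.generate` picks up automatically when the model is loaded from the Hub. A minimal sketch of inspecting them with the standard `GenerationConfig` API:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct")
print(gen_cfg.temperature, gen_cfg.top_p, gen_cfg.top_k)  # 0.7 0.8 20

# Any field can be overridden per call, e.g. model.generate(..., temperature=0.2).
```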
    	
        merges.txt
    ADDED
    
The diff for this file is too large to render. See raw diff.
    	
        model-00001-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5746929d771eb06ba0105671a2018dca3c648d4664ca5a5ce2077df82fabad92
+size 4891730992
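What the repository actually versions for each `*.safetensors` file is a Git LFS pointer in exactly this three-line format (per the `.gitattributes` rules above); the ~4.9 GB of weights live in LFS storage, addressed by the sha256 oid. A minimal, dependency-free sketch of parsing a pointer:

```python
def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "key value"; the oid is prefixed with its hash algorithm.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:5746929d771eb06ba0105671a2018dca3c648d4664ca5a5ce2077df82fabad92
size 4891730992"""
print(parse_lfs_pointer(pointer)["size_bytes"] / 1e9)  # ~4.89 GB
```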
    	
        model-00002-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12aaad653a85d7212a9cc7722937fff7b2bd060ab8522399b4a74d2bf736df8d
+size 4876059352
    	
        model-00003-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9bac433064b2c29f3e2a891539993b74b7a1da5b0a78c900b2e22fc20fe66059
+size 4876059384
    	
        model-00004-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e859e864a61c45dc2e38650359540e5f35f09d4a6993ef66d7274760e7d1905
+size 4876059416
    	
        model-00005-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57000579cf2e87cd575333288c25588a45816fc406ace74a52efed1e7bc910d5
+size 4876059416
    	
        model-00006-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72dd80465092fad2665c9705332efa99c20b9fb22ad1944e7fe7429a222ede42
+size 4876059416
    	
        model-00007-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b91130f8049d3446221000a1d44cdf7bd344f01a2b5843f9bb81ac8193beeae8
+size 4876059416
    	
        model-00008-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:522db6c67390557f2964a4a0094a463a067c91992be164b6e141aa1267a225e5
+size 4876059416
    	
        model-00009-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7c03b072b4aad4da68016b75b78c6eb3ea6b35e18ac0dae4906146603fa32e23
+size 4876059416
    	
        model-00010-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c5ee9a4665ae7d5c9e6d37f77d93c47244a80ee41284060efde69d223472a87a
+size 4876059416
    	
        model-00011-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:49f60205e190369145352de972e3510bcfeef6b5a26041573032edbe132c1e06
+size 4876059416
    	
        model-00012-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a53a0d4a02dcb5f036da4fcd50e408107144ff4c4b09357cf1829be2ef382f9
+size 4876059416
    	
        model-00013-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b26127defc68bff2e0744ae9afc9c8ab1730fee071e0400b4bfe8f07fadda5b
+size 4876059416
    	
        model-00014-of-00014.safetensors
    ADDED
    
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:983ebbded19c19264d08845710c9993714d60e17f81c62e1e15c8f174af79847
+size 2123397800
    	
        model.safetensors.index.json
    ADDED
    
@@ -0,0 +1,778 @@
+{
+  "metadata": {
+    "total_size": 65527752704
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00014-of-00014.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00003-of-00014.safetensors",
         | 
| 57 | 
            +
                "model.layers.12.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 58 | 
            +
                "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 59 | 
            +
                "model.layers.12.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 60 | 
            +
                "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
         | 
| 61 | 
            +
                "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 62 | 
            +
                "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 63 | 
            +
                "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 64 | 
            +
                "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 65 | 
            +
                "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 66 | 
            +
                "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 67 | 
            +
                "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 68 | 
            +
                "model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 69 | 
            +
                "model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 70 | 
            +
                "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 71 | 
            +
                "model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 72 | 
            +
                "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 73 | 
            +
                "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 74 | 
            +
                "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 75 | 
            +
                "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 76 | 
            +
                "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 77 | 
            +
                "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 78 | 
            +
                "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 79 | 
            +
                "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 80 | 
            +
                "model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 81 | 
            +
                "model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 82 | 
            +
                "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 83 | 
            +
                "model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 84 | 
            +
                "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 85 | 
            +
                "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 86 | 
            +
                "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 87 | 
            +
                "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 88 | 
            +
                "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 89 | 
            +
                "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 90 | 
            +
                "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 91 | 
            +
                "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 92 | 
            +
                "model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 93 | 
            +
                "model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 94 | 
            +
                "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 95 | 
            +
                "model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 96 | 
            +
                "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 97 | 
            +
                "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 98 | 
            +
                "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 99 | 
            +
                "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 100 | 
            +
                "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 101 | 
            +
                "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 102 | 
            +
                "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 103 | 
            +
                "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 104 | 
            +
                "model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 105 | 
            +
                "model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 106 | 
            +
                "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 107 | 
            +
                "model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 108 | 
            +
                "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 109 | 
            +
                "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 110 | 
            +
                "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 111 | 
            +
                "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 112 | 
            +
                "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 113 | 
            +
                "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 114 | 
            +
                "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 115 | 
            +
                "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 116 | 
            +
                "model.layers.17.input_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 117 | 
            +
                "model.layers.17.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 118 | 
            +
                "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 119 | 
            +
                "model.layers.17.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 120 | 
            +
                "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
         | 
| 121 | 
            +
                "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 122 | 
            +
                "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 123 | 
            +
                "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 124 | 
            +
                "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 125 | 
            +
                "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 126 | 
            +
                "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 127 | 
            +
                "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 128 | 
            +
                "model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 129 | 
            +
                "model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 130 | 
            +
                "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 131 | 
            +
                "model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 132 | 
            +
                "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 133 | 
            +
                "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 134 | 
            +
                "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 135 | 
            +
                "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 136 | 
            +
                "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 137 | 
            +
                "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 138 | 
            +
                "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
         | 
| 139 | 
            +
                "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
         | 
| 140 | 
            +
                "model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 141 | 
            +
                "model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 142 | 
            +
                "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 143 | 
            +
                "model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 144 | 
            +
                "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 145 | 
            +
                "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 146 | 
            +
                "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 147 | 
            +
                "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 148 | 
            +
                "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 149 | 
            +
                "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 150 | 
            +
                "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 151 | 
            +
                "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 152 | 
            +
                "model.layers.2.input_layernorm.weight": "model-00001-of-00014.safetensors",
         | 
| 153 | 
            +
                "model.layers.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 154 | 
            +
                "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 155 | 
            +
                "model.layers.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 156 | 
            +
                "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
         | 
| 157 | 
            +
                "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
         | 
| 158 | 
            +
                "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 159 | 
            +
                "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 160 | 
            +
                "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
         | 
| 161 | 
            +
                "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 162 | 
            +
                "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
         | 
| 163 | 
            +
                "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 164 | 
            +
                "model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 165 | 
            +
                "model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 166 | 
            +
                "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 167 | 
            +
                "model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 168 | 
            +
                "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 169 | 
            +
                "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 170 | 
            +
                "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 171 | 
            +
                "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 172 | 
            +
                "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 173 | 
            +
                "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 174 | 
            +
                "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 175 | 
            +
                "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 176 | 
            +
                "model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 177 | 
            +
                "model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 178 | 
            +
                "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 179 | 
            +
                "model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 180 | 
            +
                "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 181 | 
            +
                "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 182 | 
            +
                "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 183 | 
            +
                "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 184 | 
            +
                "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 185 | 
            +
                "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 186 | 
            +
                "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 187 | 
            +
                "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 188 | 
            +
                "model.layers.22.input_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 189 | 
            +
                "model.layers.22.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 190 | 
            +
                "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 191 | 
            +
                "model.layers.22.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 192 | 
            +
                "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
         | 
| 193 | 
            +
                "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 194 | 
            +
                "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 195 | 
            +
                "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 196 | 
            +
                "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 197 | 
            +
                "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 198 | 
            +
                "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 199 | 
            +
                "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 200 | 
            +
                "model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 201 | 
            +
                "model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 202 | 
            +
                "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 203 | 
            +
                "model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 204 | 
            +
                "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 205 | 
            +
                "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 206 | 
            +
                "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 207 | 
            +
                "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 208 | 
            +
                "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 209 | 
            +
                "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 210 | 
            +
                "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
         | 
| 211 | 
            +
                "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
         | 
| 212 | 
            +
                "model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 213 | 
            +
                "model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 214 | 
            +
                "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 215 | 
            +
                "model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 216 | 
            +
                "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 217 | 
            +
                "model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 218 | 
            +
                "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 219 | 
            +
                "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 220 | 
            +
                "model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 221 | 
            +
                "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 222 | 
            +
                "model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 223 | 
            +
                "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 224 | 
            +
                "model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 225 | 
            +
                "model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 226 | 
            +
                "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 227 | 
            +
                "model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 228 | 
            +
                "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 229 | 
            +
                "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 230 | 
            +
                "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 231 | 
            +
                "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 232 | 
            +
                "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 233 | 
            +
                "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 234 | 
            +
                "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 235 | 
            +
                "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 236 | 
            +
                "model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 237 | 
            +
                "model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 238 | 
            +
                "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 239 | 
            +
                "model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 240 | 
            +
                "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 241 | 
            +
                "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 242 | 
            +
                "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 243 | 
            +
                "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 244 | 
            +
                "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 245 | 
            +
                "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 246 | 
            +
                "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 247 | 
            +
                "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 248 | 
            +
                "model.layers.27.input_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 249 | 
            +
                "model.layers.27.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 250 | 
            +
                "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 251 | 
            +
                "model.layers.27.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 252 | 
            +
                "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
         | 
| 253 | 
            +
                "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 254 | 
            +
                "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 255 | 
            +
                "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 256 | 
            +
                "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 257 | 
            +
                "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 258 | 
            +
                "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 259 | 
            +
                "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 260 | 
            +
                "model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 261 | 
            +
                "model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 262 | 
            +
                "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 263 | 
            +
                "model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 264 | 
            +
                "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 265 | 
            +
                "model.layers.28.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 266 | 
            +
                "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 267 | 
            +
                "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 268 | 
            +
                "model.layers.28.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 269 | 
            +
                "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 270 | 
            +
                "model.layers.28.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
         | 
| 271 | 
            +
                "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
         | 
| 272 | 
            +
                "model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 273 | 
            +
                "model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 274 | 
            +
                "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 275 | 
            +
                "model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 276 | 
            +
                "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 277 | 
            +
                "model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 278 | 
            +
                "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 279 | 
            +
                "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 280 | 
            +
                "model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 281 | 
            +
                "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 282 | 
            +
                "model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 283 | 
            +
                "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 284 | 
            +
                "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 285 | 
            +
                "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 286 | 
            +
                "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 287 | 
            +
                "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 288 | 
            +
                "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 289 | 
            +
                "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
         | 
| 290 | 
            +
                "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 291 | 
            +
                "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 292 | 
            +
                "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
         | 
| 293 | 
            +
                "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 294 | 
            +
                "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
         | 
| 295 | 
            +
                "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
         | 
| 296 | 
            +
                "model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 297 | 
            +
                "model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 298 | 
            +
                "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 299 | 
            +
                "model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 300 | 
            +
                "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 301 | 
            +
                "model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 302 | 
            +
                "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 303 | 
            +
                "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 304 | 
            +
                "model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 305 | 
            +
                "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 306 | 
            +
                "model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 307 | 
            +
                "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 308 | 
            +
                "model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 309 | 
            +
                "model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 310 | 
            +
                "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 311 | 
            +
                "model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 312 | 
            +
                "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 313 | 
            +
                "model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 314 | 
            +
                "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 315 | 
            +
                "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 316 | 
            +
                "model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 317 | 
            +
                "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 318 | 
            +
                "model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 319 | 
            +
                "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 320 | 
            +
                "model.layers.32.input_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 321 | 
            +
                "model.layers.32.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 322 | 
            +
                "model.layers.32.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 323 | 
            +
                "model.layers.32.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 324 | 
            +
                "model.layers.32.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
         | 
| 325 | 
            +
                "model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 326 | 
            +
                "model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 327 | 
            +
                "model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 328 | 
            +
                "model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 329 | 
            +
                "model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 330 | 
            +
                "model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 331 | 
            +
                "model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 332 | 
            +
                "model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 333 | 
            +
                "model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 334 | 
            +
                "model.layers.33.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 335 | 
            +
                "model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 336 | 
            +
                "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 337 | 
            +
                "model.layers.33.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 338 | 
            +
                "model.layers.33.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 339 | 
            +
                "model.layers.33.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 340 | 
            +
                "model.layers.33.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 341 | 
            +
                "model.layers.33.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 342 | 
            +
                "model.layers.33.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
         | 
| 343 | 
            +
                "model.layers.33.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
         | 
| 344 | 
            +
                "model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 345 | 
            +
                "model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 346 | 
            +
                "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 347 | 
            +
                "model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 348 | 
            +
                "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 349 | 
            +
                "model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 350 | 
            +
                "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 351 | 
            +
                "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 352 | 
            +
                "model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 353 | 
            +
                "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 354 | 
            +
                "model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 355 | 
            +
                "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 356 | 
            +
                "model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 357 | 
            +
                "model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 358 | 
            +
                "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 359 | 
            +
                "model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 360 | 
            +
                "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 361 | 
            +
                "model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 362 | 
            +
                "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 363 | 
            +
                "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 364 | 
            +
                "model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 365 | 
            +
                "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 366 | 
            +
                "model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 367 | 
            +
                "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 368 | 
            +
                "model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 369 | 
            +
                "model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 370 | 
            +
                "model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 371 | 
            +
                "model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 372 | 
            +
                "model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 373 | 
            +
                "model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 374 | 
            +
                "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 375 | 
            +
                "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 376 | 
            +
                "model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 377 | 
            +
                "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 378 | 
            +
                "model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 379 | 
            +
                "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 380 | 
            +
                "model.layers.37.input_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 381 | 
            +
                "model.layers.37.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 382 | 
            +
                "model.layers.37.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 383 | 
            +
                "model.layers.37.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 384 | 
            +
                "model.layers.37.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
         | 
| 385 | 
            +
                "model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 386 | 
            +
                "model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 387 | 
            +
                "model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 388 | 
            +
                "model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 389 | 
            +
                "model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 390 | 
            +
                "model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 391 | 
            +
                "model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 392 | 
            +
                "model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 393 | 
            +
                "model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 394 | 
            +
                "model.layers.38.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 395 | 
            +
                "model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 396 | 
            +
                "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 397 | 
            +
                "model.layers.38.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 398 | 
            +
                "model.layers.38.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 399 | 
            +
                "model.layers.38.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 400 | 
            +
                "model.layers.38.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 401 | 
            +
                "model.layers.38.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 402 | 
            +
                "model.layers.38.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
         | 
| 403 | 
            +
                "model.layers.38.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
         | 
| 404 | 
            +
                "model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 405 | 
            +
                "model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 406 | 
            +
                "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 407 | 
            +
                "model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 408 | 
            +
                "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 409 | 
            +
                "model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 410 | 
            +
                "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 411 | 
            +
                "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 412 | 
            +
                "model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 413 | 
            +
                "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 414 | 
            +
                "model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 415 | 
            +
                "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 416 | 
            +
                "model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 417 | 
            +
                "model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 418 | 
            +
                "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 419 | 
            +
                "model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 420 | 
            +
                "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 421 | 
            +
                "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 422 | 
            +
                "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 423 | 
            +
                "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 424 | 
            +
                "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 425 | 
            +
                "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 426 | 
            +
                "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 427 | 
            +
                "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 428 | 
            +
                "model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 429 | 
            +
                "model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 430 | 
            +
                "model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 431 | 
            +
                "model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 432 | 
            +
                "model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 433 | 
            +
                "model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 434 | 
            +
                "model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 435 | 
            +
                "model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 436 | 
            +
                "model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 437 | 
            +
                "model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 438 | 
            +
                "model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 439 | 
            +
                "model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 440 | 
            +
                "model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 441 | 
            +
                "model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 442 | 
            +
                "model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 443 | 
            +
                "model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 444 | 
            +
                "model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 445 | 
            +
                "model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 446 | 
            +
                "model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 447 | 
            +
                "model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 448 | 
            +
                "model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 449 | 
            +
                "model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 450 | 
            +
                "model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 451 | 
            +
                "model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 452 | 
            +
                "model.layers.42.input_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 453 | 
            +
                "model.layers.42.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 454 | 
            +
                "model.layers.42.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 455 | 
            +
                "model.layers.42.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 456 | 
            +
                "model.layers.42.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
         | 
| 457 | 
            +
                "model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 458 | 
            +
                "model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 459 | 
            +
                "model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 460 | 
            +
                "model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 461 | 
            +
                "model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 462 | 
            +
                "model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 463 | 
            +
                "model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 464 | 
            +
                "model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 465 | 
            +
                "model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 466 | 
            +
                "model.layers.43.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 467 | 
            +
                "model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 468 | 
            +
                "model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 469 | 
            +
                "model.layers.43.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 470 | 
            +
                "model.layers.43.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 471 | 
            +
                "model.layers.43.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 472 | 
            +
                "model.layers.43.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 473 | 
            +
                "model.layers.43.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 474 | 
            +
                "model.layers.43.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
         | 
| 475 | 
            +
                "model.layers.43.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
         | 
| 476 | 
            +
                "model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 477 | 
            +
                "model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 478 | 
            +
                "model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 479 | 
            +
                "model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 480 | 
            +
                "model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 481 | 
            +
                "model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 482 | 
            +
                "model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 483 | 
            +
                "model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 484 | 
            +
                "model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 485 | 
            +
                "model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 486 | 
            +
                "model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 487 | 
            +
                "model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 488 | 
            +
                "model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 489 | 
            +
                "model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 490 | 
            +
                "model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 491 | 
            +
                "model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 492 | 
            +
                "model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 493 | 
            +
                "model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 494 | 
            +
                "model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 495 | 
            +
                "model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 496 | 
            +
                "model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 497 | 
            +
                "model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 498 | 
            +
                "model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 499 | 
            +
                "model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 500 | 
            +
                "model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 501 | 
            +
                "model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 502 | 
            +
                "model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 503 | 
            +
                "model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 504 | 
            +
                "model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 505 | 
            +
                "model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 506 | 
            +
                "model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 507 | 
            +
                "model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 508 | 
            +
                "model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 509 | 
            +
                "model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 510 | 
            +
                "model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 511 | 
            +
                "model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 512 | 
            +
                "model.layers.47.input_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 513 | 
            +
                "model.layers.47.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 514 | 
            +
                "model.layers.47.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 515 | 
            +
                "model.layers.47.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 516 | 
            +
                "model.layers.47.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
         | 
| 517 | 
            +
                "model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 518 | 
            +
                "model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 519 | 
            +
                "model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 520 | 
            +
                "model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 521 | 
            +
                "model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 522 | 
            +
                "model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 523 | 
            +
                "model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 524 | 
            +
                "model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 525 | 
            +
                "model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 526 | 
            +
                "model.layers.48.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 527 | 
            +
                "model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 528 | 
            +
                "model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 529 | 
            +
                "model.layers.48.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 530 | 
            +
                "model.layers.48.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 531 | 
            +
                "model.layers.48.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 532 | 
            +
                "model.layers.48.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 533 | 
            +
                "model.layers.48.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 534 | 
            +
                "model.layers.48.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
         | 
| 535 | 
            +
                "model.layers.48.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
         | 
| 536 | 
            +
                "model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 537 | 
            +
                "model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 538 | 
            +
                "model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 539 | 
            +
                "model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 540 | 
            +
                "model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 541 | 
            +
                "model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 542 | 
            +
                "model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 543 | 
            +
                "model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 544 | 
            +
                "model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 545 | 
            +
                "model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 546 | 
            +
                "model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 547 | 
            +
                "model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 548 | 
            +
                "model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 549 | 
            +
                "model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 550 | 
            +
                "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 551 | 
            +
                "model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 552 | 
            +
                "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 553 | 
            +
                "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 554 | 
            +
                "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 555 | 
            +
                "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 556 | 
            +
                "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 557 | 
            +
                "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 558 | 
            +
                "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 559 | 
            +
                "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 560 | 
            +
                "model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 561 | 
            +
                "model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 562 | 
            +
                "model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 563 | 
            +
                "model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 564 | 
            +
                "model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 565 | 
            +
                "model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 566 | 
            +
                "model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 567 | 
            +
                "model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 568 | 
            +
                "model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 569 | 
            +
                "model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 570 | 
            +
                "model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 571 | 
            +
                "model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 572 | 
            +
                "model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 573 | 
            +
                "model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 574 | 
            +
                "model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 575 | 
            +
                "model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 576 | 
            +
                "model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 577 | 
            +
                "model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 578 | 
            +
                "model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 579 | 
            +
                "model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 580 | 
            +
                "model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 581 | 
            +
                "model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 582 | 
            +
                "model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 583 | 
            +
                "model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 584 | 
            +
                "model.layers.52.input_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 585 | 
            +
                "model.layers.52.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 586 | 
            +
                "model.layers.52.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 587 | 
            +
                "model.layers.52.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 588 | 
            +
                "model.layers.52.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
         | 
| 589 | 
            +
                "model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 590 | 
            +
                "model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 591 | 
            +
                "model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 592 | 
            +
                "model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 593 | 
            +
                "model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 594 | 
            +
                "model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 595 | 
            +
                "model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 596 | 
            +
                "model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 597 | 
            +
                "model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 598 | 
            +
                "model.layers.53.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 599 | 
            +
                "model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 600 | 
            +
                "model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 601 | 
            +
                "model.layers.53.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 602 | 
            +
                "model.layers.53.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 603 | 
            +
                "model.layers.53.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 604 | 
            +
                "model.layers.53.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 605 | 
            +
                "model.layers.53.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 606 | 
            +
                "model.layers.53.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
         | 
| 607 | 
            +
                "model.layers.53.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
         | 
| 608 | 
            +
                "model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 609 | 
            +
                "model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 610 | 
            +
                "model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 611 | 
            +
                "model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 612 | 
            +
                "model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 613 | 
            +
                "model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 614 | 
            +
                "model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 615 | 
            +
                "model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 616 | 
            +
                "model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 617 | 
            +
                "model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 618 | 
            +
                "model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 619 | 
            +
                "model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 620 | 
            +
                "model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 621 | 
            +
                "model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 622 | 
            +
                "model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 623 | 
            +
                "model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 624 | 
            +
                "model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 625 | 
            +
                "model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 626 | 
            +
                "model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 627 | 
            +
                "model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 628 | 
            +
                "model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 629 | 
            +
                "model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 630 | 
            +
                "model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 631 | 
            +
                "model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 632 | 
            +
                "model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 633 | 
            +
                "model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 634 | 
            +
                "model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 635 | 
            +
                "model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 636 | 
            +
                "model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 637 | 
            +
                "model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 638 | 
            +
                "model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 639 | 
            +
                "model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 640 | 
            +
                "model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 641 | 
            +
                "model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 642 | 
            +
                "model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 643 | 
            +
                "model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 644 | 
            +
                "model.layers.57.input_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 645 | 
            +
                "model.layers.57.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 646 | 
            +
                "model.layers.57.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 647 | 
            +
                "model.layers.57.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 648 | 
            +
                "model.layers.57.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
         | 
| 649 | 
            +
                "model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 650 | 
            +
                "model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 651 | 
            +
                "model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 652 | 
            +
                "model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 653 | 
            +
                "model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 654 | 
            +
                "model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 655 | 
            +
                "model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 656 | 
            +
                "model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 657 | 
            +
                "model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 658 | 
            +
                "model.layers.58.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 659 | 
            +
                "model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 660 | 
            +
                "model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 661 | 
            +
                "model.layers.58.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 662 | 
            +
                "model.layers.58.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 663 | 
            +
                "model.layers.58.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 664 | 
            +
                "model.layers.58.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 665 | 
            +
                "model.layers.58.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 666 | 
            +
                "model.layers.58.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
         | 
| 667 | 
            +
                "model.layers.58.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
         | 
| 668 | 
            +
                "model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 669 | 
            +
                "model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 670 | 
            +
                "model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 671 | 
            +
                "model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 672 | 
            +
                "model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 673 | 
            +
                "model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 674 | 
            +
                "model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 675 | 
            +
                "model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 676 | 
            +
                "model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 677 | 
            +
                "model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 678 | 
            +
                "model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 679 | 
            +
                "model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 680 | 
            +
                "model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 681 | 
            +
                "model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 682 | 
            +
                "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 683 | 
            +
                "model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 684 | 
            +
                "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 685 | 
            +
                "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 686 | 
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 687 | 
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 688 | 
            +
                "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 689 | 
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 690 | 
            +
                "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 691 | 
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 692 | 
            +
                "model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 693 | 
            +
                "model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 694 | 
            +
                "model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 695 | 
            +
                "model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 696 | 
            +
                "model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 697 | 
            +
                "model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 698 | 
            +
                "model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 699 | 
            +
                "model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 700 | 
            +
                "model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 701 | 
            +
                "model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 702 | 
            +
                "model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 703 | 
            +
                "model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 704 | 
            +
                "model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 705 | 
            +
                "model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 706 | 
            +
                "model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 707 | 
            +
                "model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 708 | 
            +
                "model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 709 | 
            +
                "model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 710 | 
            +
                "model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 711 | 
            +
                "model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 712 | 
            +
                "model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 713 | 
            +
                "model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 714 | 
            +
                "model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 715 | 
            +
                "model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 716 | 
            +
                "model.layers.62.input_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 717 | 
            +
                "model.layers.62.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 718 | 
            +
                "model.layers.62.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 719 | 
            +
                "model.layers.62.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 720 | 
            +
                "model.layers.62.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
         | 
| 721 | 
            +
                "model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 722 | 
            +
                "model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 723 | 
            +
                "model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 724 | 
            +
                "model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 725 | 
            +
                "model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 726 | 
            +
                "model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 727 | 
            +
                "model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 728 | 
            +
                "model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
         | 
| 729 | 
            +
                "model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
         | 
| 730 | 
            +
                "model.layers.63.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 731 | 
            +
                "model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
         | 
| 732 | 
            +
                "model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
         | 
| 733 | 
            +
                "model.layers.63.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 734 | 
            +
                "model.layers.63.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 735 | 
            +
                "model.layers.63.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 736 | 
            +
                "model.layers.63.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 737 | 
            +
                "model.layers.63.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 738 | 
            +
                "model.layers.63.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
         | 
| 739 | 
            +
                "model.layers.63.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
         | 
| 740 | 
            +
                "model.layers.7.input_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 741 | 
            +
                "model.layers.7.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 742 | 
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 743 | 
            +
                "model.layers.7.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 744 | 
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
         | 
| 745 | 
            +
                "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 746 | 
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 747 | 
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 748 | 
            +
                "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 749 | 
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 750 | 
            +
                "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 751 | 
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 752 | 
            +
                "model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
         | 
| 753 | 
            +
                "model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 754 | 
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 755 | 
            +
                "model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 756 | 
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
         | 
| 757 | 
            +
                "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 758 | 
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 759 | 
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 760 | 
            +
                "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 761 | 
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 762 | 
            +
                "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
         | 
| 763 | 
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
         | 
| 764 | 
            +
                "model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
         | 
| 765 | 
            +
                "model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 766 | 
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 767 | 
            +
                "model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 768 | 
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
         | 
| 769 | 
            +
                "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 770 | 
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 771 | 
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 772 | 
            +
                "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 773 | 
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 774 | 
            +
                "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
         | 
| 775 | 
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
         | 
| 776 | 
            +
                "model.norm.weight": "model-00014-of-00014.safetensors"
         | 
| 777 | 
            +
              }
         | 
| 778 | 
            +
            }
         | 
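The weight_map above is what lets loaders open only the shard that actually contains a requested tensor instead of reading all 14 files. A minimal sketch of that lookup, assuming the index and shard files have already been downloaded into the working directory (the tensor name is just one example from the map):

import json
from safetensors import safe_open

# Look up which shard holds a given tensor, then load only that tensor.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.63.mlp.gate_proj.weight"
shard = index["weight_map"][name]  # -> "model-00013-of-00014.safetensors"

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)  # reads just this tensor from the shard
print(name, tuple(tensor.shape), "from", shard)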
    	
tokenizer.json
ADDED

The diff for this file is too large to render. See raw diff.
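Although the diff is not rendered, tokenizer.json is a self-contained "fast" tokenizer definition that can be inspected directly with the tokenizers library. A minimal sketch, assuming the file has been downloaded locally:

from tokenizers import Tokenizer

# Load the byte-level BPE tokenizer straight from the added file.
tok = Tokenizer.from_file("tokenizer.json")
enc = tok.encode("def fibonacci(n):")
print(enc.ids)     # token ids
print(enc.tokens)  # corresponding BPE tokens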
    	
tokenizer_config.json
ADDED

@@ -0,0 +1,207 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
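The chat_template string above is the Jinja recipe that transformers applies when formatting conversations for this model. A minimal sketch of exercising it through apply_chat_template, assuming the upstream repo id (this repo is a duplicate of Qwen/Qwen2.5-Coder-32B-Instruct; a local path to this checkout works the same way):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct")
messages = [{"role": "user", "content": "Write a hello-world in Rust."}]

# add_generation_prompt=True appends the trailing "<|im_start|>assistant\n"
# so the model continues as the assistant; with no system message, the
# template injects the default Qwen system prompt seen above.
text = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(text)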
    	
vocab.json
ADDED

The diff for this file is too large to render. See raw diff.