ek826 committed on
Commit
c3aafe6
1 Parent(s): 73de6c9
Files changed (48)
  1. LICENSE +114 -0
  2. README.md +1168 -3
  3. USE_POLICY.md +51 -0
  4. config.json +49 -0
  5. generation_config.json +12 -0
  6. model.safetensors.index.json +1144 -0
  7. output-00001-of-00038.safetensors +3 -0
  8. output-00002-of-00038.safetensors +3 -0
  9. output-00003-of-00038.safetensors +3 -0
  10. output-00004-of-00038.safetensors +3 -0
  11. output-00005-of-00038.safetensors +3 -0
  12. output-00006-of-00038.safetensors +3 -0
  13. output-00007-of-00038.safetensors +3 -0
  14. output-00008-of-00038.safetensors +3 -0
  15. output-00009-of-00038.safetensors +3 -0
  16. output-00010-of-00038.safetensors +3 -0
  17. output-00011-of-00038.safetensors +3 -0
  18. output-00012-of-00038.safetensors +3 -0
  19. output-00013-of-00038.safetensors +3 -0
  20. output-00014-of-00038.safetensors +3 -0
  21. output-00015-of-00038.safetensors +3 -0
  22. output-00016-of-00038.safetensors +3 -0
  23. output-00017-of-00038.safetensors +3 -0
  24. output-00018-of-00038.safetensors +3 -0
  25. output-00019-of-00038.safetensors +3 -0
  26. output-00020-of-00038.safetensors +3 -0
  27. output-00021-of-00038.safetensors +3 -0
  28. output-00022-of-00038.safetensors +3 -0
  29. output-00023-of-00038.safetensors +3 -0
  30. output-00024-of-00038.safetensors +3 -0
  31. output-00025-of-00038.safetensors +3 -0
  32. output-00026-of-00038.safetensors +3 -0
  33. output-00027-of-00038.safetensors +3 -0
  34. output-00028-of-00038.safetensors +3 -0
  35. output-00029-of-00038.safetensors +3 -0
  36. output-00030-of-00038.safetensors +3 -0
  37. output-00031-of-00038.safetensors +3 -0
  38. output-00032-of-00038.safetensors +3 -0
  39. output-00033-of-00038.safetensors +3 -0
  40. output-00034-of-00038.safetensors +3 -0
  41. output-00035-of-00038.safetensors +3 -0
  42. output-00036-of-00038.safetensors +3 -0
  43. output-00037-of-00038.safetensors +3 -0
  44. output-00038-of-00038.safetensors +3 -0
  45. patch.diff +291 -0
  46. special_tokens_map.json +16 -0
  47. tokenizer.json +0 -0
  48. tokenizer_config.json +2062 -0
LICENSE ADDED
@@ -0,0 +1,114 @@
+ LLAMA 3.1 COMMUNITY LICENSE AGREEMENT
+ Llama 3.1 Version Release Date: July 23, 2024
+
+ “Agreement” means the terms and conditions for use, reproduction, distribution and modification of the
+ Llama Materials set forth herein.
+
+ “Documentation” means the specifications, manuals and documentation accompanying Llama 3.1
+ distributed by Meta at https://llama.meta.com/doc/overview.
+
+ “Licensee” or “you” means you, or your employer or any other person or entity (if you are entering into
+ this Agreement on such person or entity’s behalf), of the age required under applicable laws, rules or
+ regulations to provide legal consent and that has legal authority to bind your employer or such other
+ person or entity if you are entering in this Agreement on their behalf.
+
+ “Llama 3.1” means the foundational large language models and software and algorithms, including
+ machine-learning model code, trained model weights, inference-enabling code, training-enabling code,
+ fine-tuning enabling code and other elements of the foregoing distributed by Meta at
+ https://llama.meta.com/llama-downloads.
+
+ “Llama Materials” means, collectively, Meta’s proprietary Llama 3.1 and Documentation (and any
+ portion thereof) made available under this Agreement.
+
+ “Meta” or “we” means Meta Platforms Ireland Limited (if you are located in or, if you are an entity, your
+ principal place of business is in the EEA or Switzerland) and Meta Platforms, Inc. (if you are located
+ outside of the EEA or Switzerland).
+
+ By clicking “I Accept” below or by using or distributing any portion or element of the Llama Materials,
+ you agree to be bound by this Agreement.
+
+ 1. License Rights and Redistribution.
+
+ a. Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable and royalty-free
+ limited license under Meta’s intellectual property or other rights owned by Meta embodied in the Llama
+ Materials to use, reproduce, distribute, copy, create derivative works of, and make modifications to the
+ Llama Materials.
+
+ b. Redistribution and Use.
+
+ i. If you distribute or make available the Llama Materials (or any derivative works
+ thereof), or a product or service (including another AI model) that contains any of them, you shall (A)
+ provide a copy of this Agreement with any such Llama Materials; and (B) prominently display “Built with
+ Llama” on a related website, user interface, blogpost, about page, or product documentation. If you use
+ the Llama Materials or any outputs or results of the Llama Materials to create, train, fine tune, or
+ otherwise improve an AI model, which is distributed or made available, you shall also include “Llama” at
+ the beginning of any such AI model name.
+
+ ii. If you receive Llama Materials, or any derivative works thereof, from a Licensee as part
+ of an integrated end user product, then Section 2 of this Agreement will not apply to you.
+
+ iii. You must retain in all copies of the Llama Materials that you distribute the following
+ attribution notice within a “Notice” text file distributed as a part of such copies: “Llama 3.1 is
+ licensed under the Llama 3.1 Community License, Copyright © Meta Platforms, Inc. All Rights
+ Reserved.”
+
+ iv. Your use of the Llama Materials must comply with applicable laws and regulations
+ (including trade compliance laws and regulations) and adhere to the Acceptable Use Policy for the Llama
+ Materials (available at https://llama.meta.com/llama3_1/use-policy), which is hereby incorporated by
+ reference into this Agreement.
+
+ 2. Additional Commercial Terms. If, on the Llama 3.1 version release date, the monthly active users
+ of the products or services made available by or for Licensee, or Licensee’s affiliates, is greater than 700
+ million monthly active users in the preceding calendar month, you must request a license from Meta,
+ which Meta may grant to you in its sole discretion, and you are not authorized to exercise any of the
+ rights under this Agreement unless or until Meta otherwise expressly grants you such rights.
+
+ 3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE LLAMA MATERIALS AND ANY
+ OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OF
+ ANY KIND, AND META DISCLAIMS ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED,
+ INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OF TITLE, NON-INFRINGEMENT,
+ MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR
+ DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE LLAMA MATERIALS AND
+ ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND
+ RESULTS.
+
+ 4. Limitation of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING
+ OUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL,
+ INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF META OR ITS AFFILIATES HAVE BEEN ADVISED
+ OF THE POSSIBILITY OF ANY OF THE FOREGOING.
+
+ 5. Intellectual Property.
+
+ a. No trademark licenses are granted under this Agreement, and in connection with the Llama
+ Materials, neither Meta nor Licensee may use any name or mark owned by or associated with the other
+ or any of its affiliates, except as required for reasonable and customary use in describing and
+ redistributing the Llama Materials or as set forth in this Section 5(a). Meta hereby grants you a license to
+ use “Llama” (the “Mark”) solely as required to comply with the last sentence of Section 1.b.i. You will
+ comply with Meta’s brand guidelines (currently accessible at
+ https://about.meta.com/brand/resources/meta/company-brand/ ). All goodwill arising out of your use
+ of the Mark will inure to the benefit of Meta.
+
+ b. Subject to Meta’s ownership of Llama Materials and derivatives made by or for Meta, with
+ respect to any derivative works and modifications of the Llama Materials that are made by you, as
+ between you and Meta, you are and will be the owner of such derivative works and modifications.
+
+ c. If you institute litigation or other proceedings against Meta or any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Llama Materials or Llama 3.1 outputs or
+ results, or any portion of any of the foregoing, constitutes infringement of intellectual property or other
+ rights owned or licensable by you, then any licenses granted to you under this Agreement shall
+ terminate as of the date such litigation or claim is filed or instituted. You will indemnify and hold
+ harmless Meta from and against any claim by any third party arising out of or related to your use or
+ distribution of the Llama Materials.
+
+ 6. Term and Termination. The term of this Agreement will commence upon your acceptance of this
+ Agreement or access to the Llama Materials and will continue in full force and effect until terminated in
+ accordance with the terms and conditions herein. Meta may terminate this Agreement if you are in
+ breach of any term or condition of this Agreement. Upon termination of this Agreement, you shall delete
+ and cease use of the Llama Materials. Sections 3, 4 and 7 shall survive the termination of this
+ Agreement.
+
+ 7. Governing Law and Jurisdiction. This Agreement will be governed and construed under the laws of
+ the State of California without regard to choice of law principles, and the UN Convention on Contracts
+ for the International Sale of Goods does not apply to this Agreement. The courts of California shall have
+ exclusive jurisdiction of any dispute arising out of this Agreement.
README.md CHANGED
@@ -1,3 +1,1168 @@
- ---
- license: llama3.1
- ---
+ ---
+ language:
+ - en
+ - de
+ - fr
+ - it
+ - pt
+ - hi
+ - es
+ - th
+ library_name: transformers
+ license: llama3.1
+ pipeline_tag: text-generation
+ tags:
+ - facebook
+ - meta
+ - pytorch
+ - llama
+ - llama-3
+ extra_gated_prompt: "### LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\nLlama 3.1 Version\
+   \ Release Date: July 23, 2024\n\"Agreement\" means the terms and conditions for\
+   \ use, reproduction, distribution and modification of the Llama Materials set forth\
+   \ herein.\n\"Documentation\" means the specifications, manuals and documentation\
+   \ accompanying Llama 3.1 distributed by Meta at https://llama.meta.com/doc/overview.\n\
+   \"Licensee\" or \"you\" means you, or your employer or any other person or entity\
+   \ (if you are entering into this Agreement on such person or entity’s behalf), of\
+   \ the age required under applicable laws, rules or regulations to provide legal\
+   \ consent and that has legal authority to bind your employer or such other person\
+   \ or entity if you are entering in this Agreement on their behalf.\n\"Llama 3.1\"\
+   \ means the foundational large language models and software and algorithms, including\
+   \ machine-learning model code, trained model weights, inference-enabling code, training-enabling\
+   \ code, fine-tuning enabling code and other elements of the foregoing distributed\
+   \ by Meta at https://llama.meta.com/llama-downloads.\n\"Llama Materials\" means,\
+   \ collectively, Meta’s proprietary Llama 3.1 and Documentation (and any portion\
+   \ thereof) made available under this Agreement.\n\"Meta\" or \"we\" means Meta Platforms\
+   \ Ireland Limited (if you are located in or, if you are an entity, your principal\
+   \ place of business is in the EEA or Switzerland) and Meta Platforms, Inc. (if you\
+   \ are located outside of the EEA or Switzerland).\n \n1. License Rights and Redistribution.\n\
+   a. Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable\
+   \ and royalty-free limited license under Meta’s intellectual property or other rights\
+   \ owned by Meta embodied in the Llama Materials to use, reproduce, distribute, copy,\
+   \ create derivative works of, and make modifications to the Llama Materials.\nb.\
+   \ Redistribution and Use.\ni. If you distribute or make available the Llama Materials\
+   \ (or any derivative works thereof), or a product or service (including another\
+   \ AI model) that contains any of them, you shall (A) provide a copy of this Agreement\
+   \ with any such Llama Materials; and (B) prominently display “Built with Llama”\
+   \ on a related website, user interface, blogpost, about page, or product documentation.\
+   \ If you use the Llama Materials or any outputs or results of the Llama Materials\
+   \ to create, train, fine tune, or otherwise improve an AI model, which is distributed\
+   \ or made available, you shall also include “Llama” at the beginning of any such\
+   \ AI model name.\nii. If you receive Llama Materials, or any derivative works thereof,\
+   \ from a Licensee as part of an integrated end user product, then Section 2 of\
+   \ this Agreement will not apply to you.\niii. You must retain in all copies of the\
+   \ Llama Materials that you distribute the following attribution notice within a\
+   \ “Notice” text file distributed as a part of such copies: “Llama 3.1 is licensed\
+   \ under the Llama 3.1 Community License, Copyright © Meta Platforms, Inc. All Rights\
+   \ Reserved.”\niv. Your use of the Llama Materials must comply with applicable laws\
+   \ and regulations (including trade compliance laws and regulations) and adhere to\
+   \ the Acceptable Use Policy for the Llama Materials (available at https://llama.meta.com/llama3_1/use-policy),\
+   \ which is hereby incorporated by reference into this Agreement.\n2. Additional\
+   \ Commercial Terms. If, on the Llama 3.1 version release date, the monthly active\
+   \ users of the products or services made available by or for Licensee, or Licensee’s\
+   \ affiliates, is greater than 700 million monthly active users in the preceding\
+   \ calendar month, you must request a license from Meta, which Meta may grant to\
+   \ you in its sole discretion, and you are not authorized to exercise any of the\
+   \ rights under this Agreement unless or until Meta otherwise expressly grants you\
+   \ such rights.\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE\
+   \ LLAMA MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN “AS IS”\
+   \ BASIS, WITHOUT WARRANTIES OF ANY KIND, AND META DISCLAIMS ALL WARRANTIES OF ANY\
+   \ KIND, BOTH EXPRESS AND IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES\
+   \ OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.\
+   \ YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING\
+   \ THE LLAMA MATERIALS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE LLAMA\
+   \ MATERIALS AND ANY OUTPUT AND RESULTS.\n4. Limitation of Liability. IN NO EVENT\
+   \ WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN\
+   \ CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS\
+   \ AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL,\
+   \ EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF META OR ITS AFFILIATES HAVE BEEN ADVISED\
+   \ OF THE POSSIBILITY OF ANY OF THE FOREGOING.\n5. Intellectual Property.\na. No\
+   \ trademark licenses are granted under this Agreement, and in connection with the\
+   \ Llama Materials, neither Meta nor Licensee may use any name or mark owned by or\
+   \ associated with the other or any of its affiliates, except as required for reasonable\
+   \ and customary use in describing and redistributing the Llama Materials or as set\
+   \ forth in this Section 5(a). Meta hereby grants you a license to use “Llama” (the\
+   \ “Mark”) solely as required to comply with the last sentence of Section 1.b.i.\
+   \ You will comply with Meta’s brand guidelines (currently accessible at https://about.meta.com/brand/resources/meta/company-brand/\
+   \ ). All goodwill arising out of your use of the Mark will inure to the benefit\
+   \ of Meta.\nb. Subject to Meta’s ownership of Llama Materials and derivatives made\
+   \ by or for Meta, with respect to any derivative works and modifications of the\
+   \ Llama Materials that are made by you, as between you and Meta, you are and will\
+   \ be the owner of such derivative works and modifications.\nc. If you institute\
+   \ litigation or other proceedings against Meta or any entity (including a cross-claim\
+   \ or counterclaim in a lawsuit) alleging that the Llama Materials or Llama 3.1 outputs\
+   \ or results, or any portion of any of the foregoing, constitutes infringement of\
+   \ intellectual property or other rights owned or licensable by you, then any licenses\
+   \ granted to you under this Agreement shall terminate as of the date such litigation\
+   \ or claim is filed or instituted. You will indemnify and hold harmless Meta from\
+   \ and against any claim by any third party arising out of or related to your use\
+   \ or distribution of the Llama Materials.\n6. Term and Termination. The term of\
+   \ this Agreement will commence upon your acceptance of this Agreement or access\
+   \ to the Llama Materials and will continue in full force and effect until terminated\
+   \ in accordance with the terms and conditions herein. Meta may terminate this Agreement\
+   \ if you are in breach of any term or condition of this Agreement. Upon termination\
+   \ of this Agreement, you shall delete and cease use of the Llama Materials. Sections\
+   \ 3, 4 and 7 shall survive the termination of this Agreement.\n7. Governing Law\
+   \ and Jurisdiction. This Agreement will be governed and construed under the laws\
+   \ of the State of California without regard to choice of law principles, and the\
+   \ UN Convention on Contracts for the International Sale of Goods does not apply\
+   \ to this Agreement. The courts of California shall have exclusive jurisdiction\
+   \ of any dispute arising out of this Agreement.\n### Llama 3.1 Acceptable Use Policy\n\
+   Meta is committed to promoting safe and fair use of its tools and features, including\
+   \ Llama 3.1. If you access or use Llama 3.1, you agree to this Acceptable Use Policy\
+   \ (“Policy”). The most recent copy of this policy can be found at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\n\
+   #### Prohibited Uses\nWe want everyone to use Llama 3.1 safely and responsibly.\
+   \ You agree you will not use, or allow others to use, Llama 3.1 to:\n 1. Violate\
+   \ the law or others’ rights, including to:\n 1. Engage in, promote, generate,\
+   \ contribute to, encourage, plan, incite, or further illegal or unlawful activity\
+   \ or content, such as:\n 1. Violence or terrorism\n 2. Exploitation\
+   \ or harm to children, including the solicitation, creation, acquisition, or dissemination\
+   \ of child exploitative content or failure to report Child Sexual Abuse Material\n\
+   \ 3. Human trafficking, exploitation, and sexual violence\n 4. The\
+   \ illegal distribution of information or materials to minors, including obscene\
+   \ materials, or failure to employ legally required age-gating in connection with\
+   \ such information or materials.\n 5. Sexual solicitation\n 6. Any\
+   \ other criminal activity\n 3. Engage in, promote, incite, or facilitate the\
+   \ harassment, abuse, threatening, or bullying of individuals or groups of individuals\n\
+   \ 4. Engage in, promote, incite, or facilitate discrimination or other unlawful\
+   \ or harmful conduct in the provision of employment, employment benefits, credit,\
+   \ housing, other economic benefits, or other essential goods and services\n 5.\
+   \ Engage in the unauthorized or unlicensed practice of any profession including,\
+   \ but not limited to, financial, legal, medical/health, or related professional\
+   \ practices\n 6. Collect, process, disclose, generate, or infer health, demographic,\
+   \ or other sensitive personal or private information about individuals without rights\
+   \ and consents required by applicable laws\n 7. Engage in or facilitate any action\
+   \ or generate any content that infringes, misappropriates, or otherwise violates\
+   \ any third-party rights, including the outputs or results of any products or services\
+   \ using the Llama Materials\n 8. Create, generate, or facilitate the creation\
+   \ of malicious code, malware, computer viruses or do anything else that could disable,\
+   \ overburden, interfere with or impair the proper working, integrity, operation\
+   \ or appearance of a website or computer system\n2. Engage in, promote, incite,\
+   \ facilitate, or assist in the planning or development of activities that present\
+   \ a risk of death or bodily harm to individuals, including use of Llama 3.1 related\
+   \ to the following:\n 1. Military, warfare, nuclear industries or applications,\
+   \ espionage, use for materials or activities that are subject to the International\
+   \ Traffic Arms Regulations (ITAR) maintained by the United States Department of\
+   \ State\n 2. Guns and illegal weapons (including weapon development)\n 3.\
+   \ Illegal drugs and regulated/controlled substances\n 4. Operation of critical\
+   \ infrastructure, transportation technologies, or heavy machinery\n 5. Self-harm\
+   \ or harm to others, including suicide, cutting, and eating disorders\n 6. Any\
+   \ content intended to incite or promote violence, abuse, or any infliction of bodily\
+   \ harm to an individual\n3. Intentionally deceive or mislead others, including use\
+   \ of Llama 3.1 related to the following:\n 1. Generating, promoting, or furthering\
+   \ fraud or the creation or promotion of disinformation\n 2. Generating, promoting,\
+   \ or furthering defamatory content, including the creation of defamatory statements,\
+   \ images, or other content\n 3. Generating, promoting, or further distributing\
+   \ spam\n 4. Impersonating another individual without consent, authorization,\
+   \ or legal right\n 5. Representing that the use of Llama 3.1 or outputs are human-generated\n\
+   \ 6. Generating or facilitating false online engagement, including fake reviews\
+   \ and other means of fake online engagement\n4. Fail to appropriately disclose to\
+   \ end users any known dangers of your AI system\nPlease report any violation of\
+   \ this Policy, software “bug,” or other problems that could lead to a violation\
+   \ of this Policy through one of the following means:\n * Reporting issues with\
+   \ the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\n\
+   \ * Reporting risky content generated by the model:\n developers.facebook.com/llama_output_feedback\n\
+   \ * Reporting bugs and security concerns: facebook.com/whitehat/info\n * Reporting\
+   \ violations of the Acceptable Use Policy or unlicensed uses of Meta Llama 3: LlamaUseReport@meta.com"
+ extra_gated_fields:
+   First Name: text
+   Last Name: text
+   Date of birth: date_picker
+   Country: country
+   Affiliation: text
+   Job title:
+     type: select
+     options:
+       - Student
+       - Research Graduate
+       - AI researcher
+       - AI developer/engineer
+       - Reporter
+       - Other
+   geo: ip_location
+   ? By clicking Submit below I accept the terms of the license and acknowledge that
+     the information I provide will be collected stored processed and shared in accordance
+     with the Meta Privacy Policy
+   : checkbox
+ extra_gated_description: The information you provide will be collected, stored, processed
+   and shared in accordance with the [Meta Privacy Policy](https://www.facebook.com/privacy/policy/).
+ extra_gated_button_content: Submit
+ ---
+
+ ## Model Information
+
+ The Meta Llama 3.1 collection of multilingual large language models (LLMs) comprises pretrained and instruction-tuned generative models in 8B, 70B and 405B sizes (text in/text out). The Llama 3.1 instruction-tuned, text-only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.
+
+ **Model developer:** Meta
+
+ **Model Architecture:** Llama 3.1 is an auto-regressive language model that uses an optimized transformer architecture. The tuned versions use supervised fine-tuning (SFT) and reinforcement learning with human feedback (RLHF) to align with human preferences for helpfulness and safety.
+
+
+ | | Training Data | Params | Input modalities | Output modalities | Context length | GQA | Token count | Knowledge cutoff |
+ | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+ | Llama 3.1 (text only) | A new mix of publicly available online data. | 8B | Multilingual Text | Multilingual Text and code | 128k | Yes | 15T+ | December 2023 |
+ | | | 70B | Multilingual Text | Multilingual Text and code | 128k | Yes | | |
+ | | | 405B | Multilingual Text | Multilingual Text and code | 128k | Yes | | |
+
+
+ **Supported languages:** English, German, French, Italian, Portuguese, Hindi, Spanish, and Thai.
+
+ **Llama 3.1 family of models**. Token counts refer to pretraining data only. All model versions use Grouped-Query Attention (GQA) for improved inference scalability.
+
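+ For readers new to GQA, the sketch below illustrates the core idea: several query heads share one key/value head, shrinking the KV cache relative to full multi-head attention. This is our toy illustration, not Meta’s implementation, and the head counts are made-up assumptions:
+
+ ```python
+ # Toy grouped-query attention: 8 query heads share 2 KV heads (groups of 4).
+ # Illustrative only; no causal mask, single sequence, random tensors.
+ import torch
+
+ n_q_heads, n_kv_heads, d_head, seq = 8, 2, 16, 10
+ group = n_q_heads // n_kv_heads  # query heads per shared KV head
+
+ q = torch.randn(seq, n_q_heads, d_head)
+ k = torch.randn(seq, n_kv_heads, d_head)  # KV cache 4x smaller than MHA here
+ v = torch.randn(seq, n_kv_heads, d_head)
+
+ # Broadcast each KV head to the query heads in its group:
+ k_shared = k.repeat_interleave(group, dim=1)  # (seq, n_q_heads, d_head)
+ v_shared = v.repeat_interleave(group, dim=1)
+
+ scores = q.transpose(0, 1) @ k_shared.transpose(0, 1).transpose(1, 2)
+ attn = torch.softmax(scores / d_head**0.5, dim=-1)
+ out = attn @ v_shared.transpose(0, 1)  # (n_q_heads, seq, d_head)
+ ```
+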
+ **Model Release Date:** July 23, 2024.
+
+ **Status:** This is a static model trained on an offline dataset. Future versions of the tuned models will be released as we improve model safety with community feedback.
+
+ **License:** A custom commercial license, the Llama 3.1 Community License, is available at: [https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE](https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE)
+
+ **Where to send questions or comments about the model:** Instructions on how to provide feedback or comments on the model can be found in the model [README](https://github.com/meta-llama/llama3). For more technical information about generation parameters and recipes for how to use Llama 3.1 in applications, please go [here](https://github.com/meta-llama/llama-recipes).
+
+
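+ For quick orientation, a minimal usage sketch with the Hugging Face `transformers` text-generation pipeline is shown below. This is not part of the official documentation: the model ID and generation settings are illustrative assumptions, and the gated repository requires accepting the license first.
+
+ ```python
+ # Minimal sketch; assumes transformers >= 4.43, a GPU with enough memory,
+ # and that access to the gated repo has been granted.
+ import torch
+ import transformers
+
+ model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"  # illustrative choice
+
+ pipeline = transformers.pipeline(
+     "text-generation",
+     model=model_id,
+     model_kwargs={"torch_dtype": torch.bfloat16},
+     device_map="auto",
+ )
+
+ messages = [
+     {"role": "system", "content": "You are a helpful assistant."},
+     {"role": "user", "content": "Who are you?"},
+ ]
+
+ outputs = pipeline(messages, max_new_tokens=128)
+ print(outputs[0]["generated_text"][-1])
+ ```
+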
+ ## Intended Use
+
+ **Intended Use Cases** Llama 3.1 is intended for commercial and research use in multiple languages. Instruction-tuned, text-only models are intended for assistant-like chat, whereas pretrained models can be adapted for a variety of natural language generation tasks. The Llama 3.1 model collection also supports the ability to leverage the outputs of its models to improve other models, including synthetic data generation and distillation. The Llama 3.1 Community License allows for these use cases.
+
+ **Out-of-scope** Use in any manner that violates applicable laws or regulations (including trade compliance laws). Use in any other way that is prohibited by the Acceptable Use Policy and Llama 3.1 Community License. Use in languages beyond those explicitly referenced as supported in this model card.
+
+ **<span style="text-decoration:underline;">Note</span>:** Llama 3.1 has been trained on a broader collection of languages than the 8 supported languages. Developers may fine-tune Llama 3.1 models for languages beyond the 8 supported languages provided they comply with the Llama 3.1 Community License and the Acceptable Use Policy, and in such cases are responsible for ensuring that any use of Llama 3.1 in additional languages is done in a safe and responsible manner.
+
+
+ ## Hardware and Software
+
+ **Training Factors** We used custom training libraries, Meta's custom built GPU cluster, and production infrastructure for pretraining. Fine-tuning, annotation, and evaluation were also performed on production infrastructure.
+
+ Training utilized a cumulative of **39.3M** GPU hours of computation on H100-80GB (TDP of 700W) type hardware, per the table below. Training time is the total GPU time required for training each model, and power consumption is the peak power capacity per GPU device used, adjusted for power usage efficiency.
+
+
+ **Training Greenhouse Gas Emissions** Estimated total location-based greenhouse gas emissions were **11,390** tons CO2eq for training. Since 2020, Meta has maintained net zero greenhouse gas emissions in its global operations and matched 100% of its electricity use with renewable energy; therefore, the total market-based greenhouse gas emissions for training were 0 tons CO2eq.
+
+
+ | | Training Time (GPU hours) | Training Power Consumption (W) | Training Location-Based Greenhouse Gas Emissions (tons CO2eq) | Training Market-Based Greenhouse Gas Emissions (tons CO2eq) |
+ | --- | --- | --- | --- | --- |
+ | Llama 3.1 8B | 1.46M | 700 | 420 | 0 |
+ | Llama 3.1 70B | 7.0M | 700 | 2,040 | 0 |
+ | Llama 3.1 405B | 30.84M | 700 | 8,930 | 0 |
+ | Total | 39.3M | | 11,390 | 0 |
+
+
+
+ The methodology used to determine training energy use and greenhouse gas emissions can be found [here](https://arxiv.org/pdf/2204.05149). Since Meta is openly releasing these models, the training energy use and greenhouse gas emissions will not be incurred by others.
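+
+ As a back-of-envelope check of these figures (our illustration, not Meta’s exact methodology; it ignores the power-usage-effectiveness adjustment mentioned above):
+
+ ```python
+ # Rough sketch: energy (kWh) = GPU-hours x peak power (W) / 1000,
+ # before any power-usage-effectiveness (PUE) adjustment.
+ gpu_hours = 1.46e6     # Llama 3.1 8B, from the table above
+ peak_power_w = 700     # H100-80GB TDP, from the table above
+
+ energy_kwh = gpu_hours * peak_power_w / 1000
+ print(f"{energy_kwh:,.0f} kWh")  # ~1,022,000 kWh, i.e. roughly 1 GWh
+
+ # Carbon intensity implied by the reported 420 tCO2eq (location-based):
+ print(f"~{420_000 / energy_kwh:.2f} kg CO2eq per kWh")  # ~0.41
+ ```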
+
+
+ ## Training Data
+
+ **Overview:** Llama 3.1 was pretrained on ~15 trillion tokens of data from publicly available sources. The fine-tuning data includes publicly available instruction datasets, as well as over 25M synthetically generated examples.
+
+ **Data Freshness:** The pretraining data has a cutoff of December 2023.
+
+
+ ## Benchmark scores
+
+ In this section, we report the results for Llama 3.1 models on standard automatic benchmarks. For all the evaluations, we use our internal evaluations library.
+
+ ### Base pretrained models
+
+
+ | Category | Benchmark | # Shots | Metric | Llama 3 8B | Llama 3.1 8B | Llama 3 70B | Llama 3.1 70B | Llama 3.1 405B |
+ | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+ | General | MMLU | 5 | macro_avg/acc_char | 66.7 | 66.7 | 79.5 | 79.3 | 85.2 |
+ | | MMLU-Pro (CoT) | 5 | macro_avg/acc_char | 36.2 | 37.1 | 55.0 | 53.8 | 61.6 |
+ | | AGIEval English | 3-5 | average/acc_char | 47.1 | 47.8 | 63.0 | 64.6 | 71.6 |
+ | | CommonSenseQA | 7 | acc_char | 72.6 | 75.0 | 83.8 | 84.1 | 85.8 |
+ | | Winogrande | 5 | acc_char | - | 60.5 | - | 83.3 | 86.7 |
+ | | BIG-Bench Hard (CoT) | 3 | average/em | 61.1 | 64.2 | 81.3 | 81.6 | 85.9 |
+ | | ARC-Challenge | 25 | acc_char | 79.4 | 79.7 | 93.1 | 92.9 | 96.1 |
+ | Knowledge reasoning | TriviaQA-Wiki | 5 | em | 78.5 | 77.6 | 89.7 | 89.8 | 91.8 |
+ | Reading comprehension | SQuAD | 1 | em | 76.4 | 77.0 | 85.6 | 81.8 | 89.3 |
+ | | QuAC (F1) | 1 | f1 | 44.4 | 44.9 | 51.1 | 51.1 | 53.6 |
+ | | BoolQ | 0 | acc_char | 75.7 | 75.0 | 79.0 | 79.4 | 80.0 |
+ | | DROP (F1) | 3 | f1 | 58.4 | 59.5 | 79.7 | 79.6 | 84.8 |
+
+
+
+ ### Instruction tuned models
+
+
+ | Category | Benchmark | # Shots | Metric | Llama 3 8B Instruct | Llama 3.1 8B Instruct | Llama 3 70B Instruct | Llama 3.1 70B Instruct | Llama 3.1 405B Instruct |
+ | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+ | General | MMLU | 5 | macro_avg/acc | 68.5 | 69.4 | 82.0 | 83.6 | 87.3 |
+ | | MMLU (CoT) | 0 | macro_avg/acc | 65.3 | 73.0 | 80.9 | 86.0 | 88.6 |
+ | | MMLU-Pro (CoT) | 5 | micro_avg/acc_char | 45.5 | 48.3 | 63.4 | 66.4 | 73.3 |
+ | | IFEval | | | 76.8 | 80.4 | 82.9 | 87.5 | 88.6 |
+ | Reasoning | ARC-C | 0 | acc | 82.4 | 83.4 | 94.4 | 94.8 | 96.9 |
+ | | GPQA | 0 | em | 34.6 | 30.4 | 39.5 | 41.7 | 50.7 |
+ | Code | HumanEval | 0 | pass@1 | 60.4 | 72.6 | 81.7 | 80.5 | 89.0 |
+ | | MBPP ++ base version | 0 | pass@1 | 70.6 | 72.8 | 82.5 | 86.0 | 88.6 |
+ | | Multipl-E HumanEval | 0 | pass@1 | - | 50.8 | - | 65.5 | 75.2 |
+ | | Multipl-E MBPP | 0 | pass@1 | - | 52.4 | - | 62.0 | 65.7 |
+ | Math | GSM-8K (CoT) | 8 | em_maj1@1 | 80.6 | 84.5 | 93.0 | 95.1 | 96.8 |
+ | | MATH (CoT) | 0 | final_em | 29.1 | 51.9 | 51.0 | 68.0 | 73.8 |
+ | Tool Use | API-Bank | 0 | acc | 48.3 | 82.6 | 85.1 | 90.0 | 92.0 |
+ | | BFCL | 0 | acc | 60.3 | 76.1 | 83.0 | 84.8 | 88.5 |
+ | | Gorilla Benchmark API Bench | 0 | acc | 1.7 | 8.2 | 14.7 | 29.7 | 35.3 |
+ | | Nexus (0-shot) | 0 | macro_avg/acc | 18.1 | 38.5 | 47.8 | 56.7 | 58.7 |
+ | Multilingual | Multilingual MGSM (CoT) | 0 | em | - | 68.9 | - | 86.9 | 91.6 |
+
+ #### Multilingual benchmarks
+
+ | Category | Benchmark | Language | Llama 3.1 8B | Llama 3.1 70B | Llama 3.1 405B |
+ | --- | --- | --- | --- | --- | --- |
+ | General | MMLU (5-shot, macro_avg/acc) | Portuguese | 62.12 | 80.13 | 84.95 |
+ | | | Spanish | 62.45 | 80.05 | 85.08 |
+ | | | Italian | 61.63 | 80.4 | 85.04 |
+ | | | German | 60.59 | 79.27 | 84.36 |
+ | | | French | 62.34 | 79.82 | 84.66 |
+ | | | Hindi | 50.88 | 74.52 | 80.31 |
+ | | | Thai | 50.32 | 72.95 | 78.21 |
+
+
+
+ ## Responsibility & Safety
+
+ As part of our responsible release approach, we followed a three-pronged strategy for managing trust & safety risks:
+
+
+
+ * Enable developers to deploy helpful, safe and flexible experiences for their target audience and for the use cases supported by Llama.
+ * Protect developers against adversarial users aiming to exploit Llama capabilities to potentially cause harm.
+ * Provide protections for the community to help prevent the misuse of our models.
+
+
+ ### Responsible deployment
+
+ Llama is a foundational technology designed to be used in a variety of use cases; examples of how Meta’s Llama models have been responsibly deployed can be found in our [Community Stories webpage](https://llama.meta.com/community-stories/). Our approach is to build the most helpful models, enabling the world to benefit from the power of this technology by aligning our model safety for generic use cases that address a standard set of harms. Developers are then in the driver’s seat to tailor safety for their use case, defining their own policies and deploying the models with the necessary safeguards in their Llama systems. Llama 3.1 was developed following the best practices outlined in our [Responsible Use Guide](https://llama.meta.com/responsible-use-guide/); refer to it to learn more.
+
+
+ #### Llama 3.1 instruct
+
+ Our main objectives for conducting safety fine-tuning are to provide the research community with a valuable resource for studying the robustness of safety fine-tuning, and to offer developers a readily available, safe, and powerful model for various applications, reducing the workload required to deploy safe AI systems. For more details on the safety mitigations implemented, please read the Llama 3 paper.
+
+ **Fine-tuning data**
+
+ We employ a multi-faceted approach to data collection, combining human-generated data from our vendors with synthetic data to mitigate potential safety risks. We’ve developed many large language model (LLM)-based classifiers that enable us to thoughtfully select high-quality prompts and responses, enhancing data quality control.
+
+ **Refusals and Tone**
+
+ Building on the work we started with Llama 3, we put a great emphasis on model refusals to benign prompts as well as refusal tone. We included both borderline and adversarial prompts in our safety data strategy, and modified our safety data responses to follow tone guidelines.
+
+
+ #### Llama 3.1 systems
+
+ **Large language models, including Llama 3.1, are not designed to be deployed in isolation but instead should be deployed as part of an overall AI system with additional safety guardrails as required.** Developers are expected to deploy system safeguards when building agentic systems. Safeguards are key to achieving the right helpfulness-safety alignment, as well as to mitigating safety and security risks inherent to the system and to any integration of the model or system with external tools.
+
+ As part of our responsible release approach, we provide the community with [safeguards](https://llama.meta.com/trust-and-safety/) that developers should deploy with Llama models or other LLMs, including Llama Guard 3, Prompt Guard and Code Shield. All our [reference implementation](https://github.com/meta-llama/llama-agentic-system) demos contain these safeguards by default so developers can benefit from system-level safety out of the box.
+
+
+ #### New capabilities
+
+ Note that this release introduces new capabilities, including a longer context window, multilingual inputs and outputs, and possible integrations by developers with third-party tools. Building with these new capabilities requires specific considerations in addition to the best practices that generally apply across all Generative AI use cases.
+
+ **Tool-use**: Just like in standard software development, developers are responsible for the integration of the LLM with the tools and services of their choice. They should define a clear policy for their use case and assess the integrity of the third-party services they use, so as to be aware of the safety and security limitations when using this capability. Refer to the Responsible Use Guide for best practices on the safe deployment of third-party safeguards.
+
+ **Multilinguality**: Llama 3.1 supports 7 languages in addition to English: French, German, Hindi, Italian, Portuguese, Spanish, and Thai. Llama may be able to output text in languages other than those that meet performance thresholds for safety and helpfulness. We strongly discourage developers from using this model to converse in non-supported languages without implementing fine-tuning and system controls in alignment with their policies and the best practices shared in the Responsible Use Guide.
+
+
+ ### Evaluations
+
+ We evaluated Llama models for common use cases as well as specific capabilities. Common-use-case evaluations measure the safety risks of systems for the most commonly built applications, including chatbots, coding assistants, and tool calling. We built dedicated, adversarial evaluation datasets and evaluated systems composed of Llama models and Llama Guard 3 to filter input prompts and output responses. It is important to evaluate applications in context, and we recommend building a dedicated evaluation dataset for your use case. Prompt Guard and Code Shield are also available if relevant to the application.
+
+ Capability evaluations measure vulnerabilities of Llama models inherent to specific capabilities, for which we crafted dedicated benchmarks, including long context, multilingual, tool calls, coding, and memorization.
+
+ **Red teaming**
+
+ For both scenarios, we conducted recurring red teaming exercises with the goal of discovering risks via adversarial prompting, and we used the learnings to improve our benchmarks and safety tuning datasets.
+
+ We partnered early with subject-matter experts in critical risk areas to understand the nature of these real-world harms and how such models may lead to unintended harm for society. Based on these conversations, we derived a set of adversarial goals for the red team to attempt to achieve, such as extracting harmful information or reprogramming the model to act in a potentially harmful capacity. The red team consisted of experts in cybersecurity, adversarial machine learning, responsible AI, and integrity, in addition to multilingual content specialists with backgrounds in integrity issues in specific geographic markets.
+
+
+ ### Critical and other risks
+
+ We specifically focused our efforts on mitigating the following critical risk areas:
+
+ **1. CBRNE (Chemical, Biological, Radiological, Nuclear, and Explosive materials) helpfulness**
+
+ To assess risks related to proliferation of chemical and biological weapons, we performed uplift testing designed to assess whether use of Llama 3.1 models could meaningfully increase the capabilities of malicious actors to plan or carry out attacks using these types of weapons.
+
+
+ **2. Child Safety**
+
+ Child Safety risk assessments were conducted using a team of experts to assess the model’s capability to produce outputs that could result in Child Safety risks and to inform any necessary and appropriate risk mitigations via fine-tuning. We leveraged those expert red teaming sessions to expand the coverage of our evaluation benchmarks through Llama 3 model development. For Llama 3, we conducted new in-depth sessions using objective-based methodologies to assess the model risks along multiple attack vectors, including the additional languages Llama 3 is trained on. We also partnered with content specialists to perform red teaming exercises assessing potentially violating content while taking account of market-specific nuances or experiences.
+
+ **3. Cyber attack enablement**
+
+ Our cyber attack uplift study investigated whether LLMs can enhance human capabilities in hacking tasks, both in terms of skill level and speed.
+
+ Our attack automation study focused on evaluating the capabilities of LLMs when used as autonomous agents in cyber offensive operations, specifically in the context of ransomware attacks. This evaluation was distinct from previous studies that considered LLMs as interactive assistants. The primary objective was to assess whether these models could effectively function as independent agents in executing complex cyber-attacks without human intervention.
+
+ Our study of Llama-3.1-405B’s social engineering uplift for cyber attackers was conducted to assess the effectiveness of AI models in aiding cyber threat actors in spear phishing campaigns. Please read our Llama 3.1 Cyber security whitepaper to learn more.
+
+
+ ### Community
+
+ Generative AI safety requires expertise and tooling, and we believe in the strength of the open community to accelerate its progress. We are active members of open consortiums, including the AI Alliance, Partnership on AI and MLCommons, actively contributing to safety standardization and transparency. We encourage the community to adopt taxonomies like the MLCommons Proof of Concept evaluation to facilitate collaboration and transparency on safety and content evaluations. Our Purple Llama tools are open sourced for the community to use, and widely distributed across ecosystem partners including cloud service providers. We encourage community contributions to our [Github repository](https://github.com/meta-llama/PurpleLlama).
+
+ We also set up the [Llama Impact Grants](https://llama.meta.com/llama-impact-grants/) program to identify and support the most compelling applications of Meta’s Llama model for societal benefit across three categories: education, climate and open innovation. The 20 finalists from the hundreds of applications can be found [here](https://llama.meta.com/llama-impact-grants/#finalists).
+
+ Finally, we put in place a set of resources including an [output reporting mechanism](https://developers.facebook.com/llama_output_feedback) and [bug bounty program](https://www.facebook.com/whitehat) to continuously improve the Llama technology with the help of the community.
+
+
+ ## Ethical Considerations and Limitations
+
+ The core values of Llama 3.1 are openness, inclusivity and helpfulness. It is meant to serve everyone, and to work for a wide range of use cases. It is thus designed to be accessible to people across many different backgrounds, experiences and perspectives. Llama 3.1 addresses users and their needs as they are, without inserting unnecessary judgment or normativity, while reflecting the understanding that even content that may appear problematic in some cases can serve valuable purposes in others. It respects the dignity and autonomy of all users, especially in terms of the values of free thought and expression that power innovation and progress.
+
+ But Llama 3.1 is a new technology, and like any new technology, there are risks associated with its use. Testing conducted to date has not covered, nor could it cover, all scenarios. For these reasons, as with all LLMs, Llama 3.1’s potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of Llama 3.1 models, developers should perform safety testing and tuning tailored to their specific applications of the model. Please refer to available resources including our [Responsible Use Guide](https://llama.meta.com/responsible-use-guide), [Trust and Safety](https://llama.meta.com/trust-and-safety/) solutions, and other [resources](https://llama.meta.com/docs/get-started/) to learn more about responsible development.
USE_POLICY.md ADDED
@@ -0,0 +1,51 @@
+ # Llama 3.1 Acceptable Use Policy
+
+ Meta is committed to promoting safe and fair use of its tools and features, including Llama 3.1. If you
+ access or use Llama 3.1, you agree to this Acceptable Use Policy (“Policy”). The most recent copy of
+ this policy can be found at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)
+
+ ## Prohibited Uses
+
+ We want everyone to use Llama 3.1 safely and responsibly. You agree you will not use, or allow
+ others to use, Llama 3.1 to:
+
+ 1. Violate the law or others’ rights, including to:
+     1. Engage in, promote, generate, contribute to, encourage, plan, incite, or further illegal or unlawful activity or content, such as:
+         1. Violence or terrorism
+         2. Exploitation or harm to children, including the solicitation, creation, acquisition, or dissemination of child exploitative content or failure to report Child Sexual Abuse Material
+         3. Human trafficking, exploitation, and sexual violence
+         4. The illegal distribution of information or materials to minors, including obscene materials, or failure to employ legally required age-gating in connection with such information or materials.
+         5. Sexual solicitation
+         6. Any other criminal activity
+     2. Engage in, promote, incite, or facilitate the harassment, abuse, threatening, or bullying of individuals or groups of individuals
+     3. Engage in, promote, incite, or facilitate discrimination or other unlawful or harmful conduct in the provision of employment, employment benefits, credit, housing, other economic benefits, or other essential goods and services
+     4. Engage in the unauthorized or unlicensed practice of any profession including, but not limited to, financial, legal, medical/health, or related professional practices
+     5. Collect, process, disclose, generate, or infer health, demographic, or other sensitive personal or private information about individuals without rights and consents required by applicable laws
+     6. Engage in or facilitate any action or generate any content that infringes, misappropriates, or otherwise violates any third-party rights, including the outputs or results of any products or services using the Llama Materials
+     7. Create, generate, or facilitate the creation of malicious code, malware, computer viruses or do anything else that could disable, overburden, interfere with or impair the proper working, integrity, operation or appearance of a website or computer system
+
+ 2. Engage in, promote, incite, facilitate, or assist in the planning or development of activities that present a risk of death or bodily harm to individuals, including use of Llama 3.1 related to the following:
+     1. Military, warfare, nuclear industries or applications, espionage, use for materials or activities that are subject to the International Traffic in Arms Regulations (ITAR) maintained by the United States Department of State
+     2. Guns and illegal weapons (including weapon development)
+     3. Illegal drugs and regulated/controlled substances
+     4. Operation of critical infrastructure, transportation technologies, or heavy machinery
+     5. Self-harm or harm to others, including suicide, cutting, and eating disorders
+     6. Any content intended to incite or promote violence, abuse, or any infliction of bodily harm to an individual
+
+ 3. Intentionally deceive or mislead others, including use of Llama 3.1 related to the following:
+     1. Generating, promoting, or furthering fraud or the creation or promotion of disinformation
+     2. Generating, promoting, or furthering defamatory content, including the creation of defamatory statements, images, or other content
+     3. Generating, promoting, or further distributing spam
+     4. Impersonating another individual without consent, authorization, or legal right
+     5. Representing that the use of Llama 3.1 or outputs are human-generated
+     6. Generating or facilitating false online engagement, including fake reviews and other means of fake online engagement
+
+ 4. Fail to appropriately disclose to end users any known dangers of your AI system
+
+ Please report any violation of this Policy, software “bug,” or other problems that could lead to a violation
+ of this Policy through one of the following means:
+
+ * Reporting issues with the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)
+ * Reporting risky content generated by the model: developers.facebook.com/llama_output_feedback
+ * Reporting bugs and security concerns: facebook.com/whitehat/info
+ * Reporting violations of the Acceptable Use Policy or unlicensed uses of Llama 3.1: LlamaUseReport@meta.com
config.json ADDED
@@ -0,0 +1,49 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "hidden_act": "silu",
+   "hidden_size": 16384,
+   "initializer_range": 0.02,
+   "intermediate_size": 53248,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 128,
+   "num_hidden_layers": 126,
+   "num_key_value_heads": 16,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "low_freq_factor": 1.0,
+     "high_freq_factor": 4.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.42.3",
+   "use_cache": true,
+   "vocab_size": 128256,
+   "quantization_config": {
+     "quant_method": "exl2",
+     "version": "0.1.8",
+     "bits": 6.0,
+     "head_bits": 6,
+     "calibration": {
+       "rows": 115,
+       "length": 2048,
+       "dataset": "(default)"
+     }
+   }
+ }
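
The config above pins down a few derived quantities that are easy to sanity-check by hand: 16384 hidden dims over 128 attention heads gives 128 dims per head, and 128 query heads over 16 key/value heads means 8-way grouped-query attention, while `quantization_config` records the ExLlamaV2 (exl2) settings this upload was quantized with. A minimal sketch of reading those values back, assuming a local copy of the file named `config.json`:

```python
import json

# Assumes config.json (shown above) sits in the current directory.
with open("config.json") as f:
    cfg = json.load(f)

# 16384 / 128 = 128 dims per attention head.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
# 128 / 16 = 8 query heads share each key/value head (grouped-query attention).
gqa_group = cfg["num_attention_heads"] // cfg["num_key_value_heads"]
quant = cfg["quantization_config"]

print(f"{cfg['num_hidden_layers']} layers, head_dim={head_dim}, GQA group={gqa_group}")
print(f"context window: {cfg['max_position_embeddings']} tokens (llama3 rope scaling)")
print(f"exl2: {quant['bits']} bits/weight, head_bits={quant['head_bits']}")
```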
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.42.3"
+ }
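
These are the sampling defaults that the `transformers` library picks up automatically for this checkpoint: nucleus sampling at temperature 0.6 and top_p 0.9, stopping on any of the three end-of-sequence ids (128001, 128008, 128009). A minimal sketch of inspecting them, assuming a local checkout of this repo and the standard `GenerationConfig` API:

```python
from transformers import GenerationConfig

# Assumes generation_config.json (shown above) is in the current directory.
gen_cfg = GenerationConfig.from_pretrained(".")

print(gen_cfg.do_sample, gen_cfg.temperature, gen_cfg.top_p)  # True 0.6 0.9
print(gen_cfg.eos_token_id)  # [128001, 128008, 128009]

# Passing gen_cfg via model.generate(generation_config=gen_cfg) applies the
# same defaults explicitly instead of relying on the implicit lookup.
```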
model.safetensors.index.json ADDED
@@ -0,0 +1,1144 @@
+ {
+   "metadata": {
+     "total_size": 820162494464
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00191-of-00191.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00191.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00003-of-00191.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00003-of-00191.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00191.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00002-of-00191.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00191.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00191.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00191.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00191.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00191.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00004-of-00191.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00004-of-00191.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00003-of-00191.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00004-of-00191.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00004-of-00191.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00003-of-00191.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00003-of-00191.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00003-of-00191.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00003-of-00191.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00018-of-00191.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00018-of-00191.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00017-of-00191.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00017-of-00191.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00018-of-00191.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00016-of-00191.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00016-of-00191.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00016-of-00191.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00016-of-00191.safetensors",
+     "model.layers.100.input_layernorm.weight": "model-00153-of-00191.safetensors",
+     "model.layers.100.mlp.down_proj.weight": "model-00153-of-00191.safetensors",
+     "model.layers.100.mlp.gate_proj.weight": "model-00152-of-00191.safetensors",
+     "model.layers.100.mlp.up_proj.weight": "model-00152-of-00191.safetensors",
+     "model.layers.100.post_attention_layernorm.weight": "model-00153-of-00191.safetensors",
+     "model.layers.100.self_attn.k_proj.weight": "model-00151-of-00191.safetensors",
+     "model.layers.100.self_attn.o_proj.weight": "model-00151-of-00191.safetensors",
+     "model.layers.100.self_attn.q_proj.weight": "model-00151-of-00191.safetensors",
+     "model.layers.100.self_attn.v_proj.weight": "model-00151-of-00191.safetensors",
+     "model.layers.101.input_layernorm.weight": "model-00154-of-00191.safetensors",
+     "model.layers.101.mlp.down_proj.weight": "model-00154-of-00191.safetensors",
+     "model.layers.101.mlp.gate_proj.weight": "model-00153-of-00191.safetensors",
+     "model.layers.101.mlp.up_proj.weight": "model-00154-of-00191.safetensors",
+     "model.layers.101.post_attention_layernorm.weight": "model-00154-of-00191.safetensors",
+     "model.layers.101.self_attn.k_proj.weight": "model-00153-of-00191.safetensors",
+     "model.layers.101.self_attn.o_proj.weight": "model-00153-of-00191.safetensors",
+     "model.layers.101.self_attn.q_proj.weight": "model-00153-of-00191.safetensors",
+     "model.layers.101.self_attn.v_proj.weight": "model-00153-of-00191.safetensors",
+     "model.layers.102.input_layernorm.weight": "model-00156-of-00191.safetensors",
+     "model.layers.102.mlp.down_proj.weight": "model-00156-of-00191.safetensors",
+     "model.layers.102.mlp.gate_proj.weight": "model-00155-of-00191.safetensors",
+     "model.layers.102.mlp.up_proj.weight": "model-00155-of-00191.safetensors",
+     "model.layers.102.post_attention_layernorm.weight": "model-00156-of-00191.safetensors",
+     "model.layers.102.self_attn.k_proj.weight": "model-00154-of-00191.safetensors",
+     "model.layers.102.self_attn.o_proj.weight": "model-00154-of-00191.safetensors",
+     "model.layers.102.self_attn.q_proj.weight": "model-00154-of-00191.safetensors",
+     "model.layers.102.self_attn.v_proj.weight": "model-00154-of-00191.safetensors",
+     "model.layers.103.input_layernorm.weight": "model-00157-of-00191.safetensors",
+     "model.layers.103.mlp.down_proj.weight": "model-00157-of-00191.safetensors",
+     "model.layers.103.mlp.gate_proj.weight": "model-00156-of-00191.safetensors",
+     "model.layers.103.mlp.up_proj.weight": "model-00157-of-00191.safetensors",
+     "model.layers.103.post_attention_layernorm.weight": "model-00157-of-00191.safetensors",
+     "model.layers.103.self_attn.k_proj.weight": "model-00156-of-00191.safetensors",
+     "model.layers.103.self_attn.o_proj.weight": "model-00156-of-00191.safetensors",
+     "model.layers.103.self_attn.q_proj.weight": "model-00156-of-00191.safetensors",
+     "model.layers.103.self_attn.v_proj.weight": "model-00156-of-00191.safetensors",
+     "model.layers.104.input_layernorm.weight": "model-00159-of-00191.safetensors",
+     "model.layers.104.mlp.down_proj.weight": "model-00159-of-00191.safetensors",
+     "model.layers.104.mlp.gate_proj.weight": "model-00158-of-00191.safetensors",
+     "model.layers.104.mlp.up_proj.weight": "model-00158-of-00191.safetensors",
+     "model.layers.104.post_attention_layernorm.weight": "model-00159-of-00191.safetensors",
+     "model.layers.104.self_attn.k_proj.weight": "model-00157-of-00191.safetensors",
+     "model.layers.104.self_attn.o_proj.weight": "model-00157-of-00191.safetensors",
+     "model.layers.104.self_attn.q_proj.weight": "model-00157-of-00191.safetensors",
+     "model.layers.104.self_attn.v_proj.weight": "model-00157-of-00191.safetensors",
+     "model.layers.105.input_layernorm.weight": "model-00160-of-00191.safetensors",
+     "model.layers.105.mlp.down_proj.weight": "model-00160-of-00191.safetensors",
+     "model.layers.105.mlp.gate_proj.weight": "model-00159-of-00191.safetensors",
+     "model.layers.105.mlp.up_proj.weight": "model-00160-of-00191.safetensors",
+     "model.layers.105.post_attention_layernorm.weight": "model-00160-of-00191.safetensors",
+     "model.layers.105.self_attn.k_proj.weight": "model-00159-of-00191.safetensors",
+     "model.layers.105.self_attn.o_proj.weight": "model-00159-of-00191.safetensors",
+     "model.layers.105.self_attn.q_proj.weight": "model-00159-of-00191.safetensors",
+     "model.layers.105.self_attn.v_proj.weight": "model-00159-of-00191.safetensors",
+     "model.layers.106.input_layernorm.weight": "model-00162-of-00191.safetensors",
+     "model.layers.106.mlp.down_proj.weight": "model-00162-of-00191.safetensors",
+     "model.layers.106.mlp.gate_proj.weight": "model-00161-of-00191.safetensors",
+     "model.layers.106.mlp.up_proj.weight": "model-00161-of-00191.safetensors",
+     "model.layers.106.post_attention_layernorm.weight": "model-00162-of-00191.safetensors",
+     "model.layers.106.self_attn.k_proj.weight": "model-00160-of-00191.safetensors",
+     "model.layers.106.self_attn.o_proj.weight": "model-00160-of-00191.safetensors",
+     "model.layers.106.self_attn.q_proj.weight": "model-00160-of-00191.safetensors",
+     "model.layers.106.self_attn.v_proj.weight": "model-00160-of-00191.safetensors",
+     "model.layers.107.input_layernorm.weight": "model-00163-of-00191.safetensors",
+     "model.layers.107.mlp.down_proj.weight": "model-00163-of-00191.safetensors",
+     "model.layers.107.mlp.gate_proj.weight": "model-00162-of-00191.safetensors",
+     "model.layers.107.mlp.up_proj.weight": "model-00163-of-00191.safetensors",
+     "model.layers.107.post_attention_layernorm.weight": "model-00163-of-00191.safetensors",
+     "model.layers.107.self_attn.k_proj.weight": "model-00162-of-00191.safetensors",
+     "model.layers.107.self_attn.o_proj.weight": "model-00162-of-00191.safetensors",
+     "model.layers.107.self_attn.q_proj.weight": "model-00162-of-00191.safetensors",
+     "model.layers.107.self_attn.v_proj.weight": "model-00162-of-00191.safetensors",
+     "model.layers.108.input_layernorm.weight": "model-00165-of-00191.safetensors",
+     "model.layers.108.mlp.down_proj.weight": "model-00165-of-00191.safetensors",
+     "model.layers.108.mlp.gate_proj.weight": "model-00164-of-00191.safetensors",
+     "model.layers.108.mlp.up_proj.weight": "model-00164-of-00191.safetensors",
+     "model.layers.108.post_attention_layernorm.weight": "model-00165-of-00191.safetensors",
+     "model.layers.108.self_attn.k_proj.weight": "model-00163-of-00191.safetensors",
+     "model.layers.108.self_attn.o_proj.weight": "model-00163-of-00191.safetensors",
+     "model.layers.108.self_attn.q_proj.weight": "model-00163-of-00191.safetensors",
+     "model.layers.108.self_attn.v_proj.weight": "model-00163-of-00191.safetensors",
+     "model.layers.109.input_layernorm.weight": "model-00166-of-00191.safetensors",
+     "model.layers.109.mlp.down_proj.weight": "model-00166-of-00191.safetensors",
+     "model.layers.109.mlp.gate_proj.weight": "model-00165-of-00191.safetensors",
+     "model.layers.109.mlp.up_proj.weight": "model-00166-of-00191.safetensors",
+     "model.layers.109.post_attention_layernorm.weight": "model-00166-of-00191.safetensors",
+     "model.layers.109.self_attn.k_proj.weight": "model-00165-of-00191.safetensors",
+     "model.layers.109.self_attn.o_proj.weight": "model-00165-of-00191.safetensors",
+     "model.layers.109.self_attn.q_proj.weight": "model-00165-of-00191.safetensors",
+     "model.layers.109.self_attn.v_proj.weight": "model-00165-of-00191.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00019-of-00191.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00019-of-00191.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00018-of-00191.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00019-of-00191.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00019-of-00191.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00018-of-00191.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00018-of-00191.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00018-of-00191.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00018-of-00191.safetensors",
+     "model.layers.110.input_layernorm.weight": "model-00168-of-00191.safetensors",
+     "model.layers.110.mlp.down_proj.weight": "model-00168-of-00191.safetensors",
+     "model.layers.110.mlp.gate_proj.weight": "model-00167-of-00191.safetensors",
+     "model.layers.110.mlp.up_proj.weight": "model-00167-of-00191.safetensors",
+     "model.layers.110.post_attention_layernorm.weight": "model-00168-of-00191.safetensors",
+     "model.layers.110.self_attn.k_proj.weight": "model-00166-of-00191.safetensors",
+     "model.layers.110.self_attn.o_proj.weight": "model-00166-of-00191.safetensors",
+     "model.layers.110.self_attn.q_proj.weight": "model-00166-of-00191.safetensors",
+     "model.layers.110.self_attn.v_proj.weight": "model-00166-of-00191.safetensors",
+     "model.layers.111.input_layernorm.weight": "model-00169-of-00191.safetensors",
+     "model.layers.111.mlp.down_proj.weight": "model-00169-of-00191.safetensors",
+     "model.layers.111.mlp.gate_proj.weight": "model-00168-of-00191.safetensors",
+     "model.layers.111.mlp.up_proj.weight": "model-00169-of-00191.safetensors",
+     "model.layers.111.post_attention_layernorm.weight": "model-00169-of-00191.safetensors",
+     "model.layers.111.self_attn.k_proj.weight": "model-00168-of-00191.safetensors",
+     "model.layers.111.self_attn.o_proj.weight": "model-00168-of-00191.safetensors",
+     "model.layers.111.self_attn.q_proj.weight": "model-00168-of-00191.safetensors",
+     "model.layers.111.self_attn.v_proj.weight": "model-00168-of-00191.safetensors",
+     "model.layers.112.input_layernorm.weight": "model-00171-of-00191.safetensors",
+     "model.layers.112.mlp.down_proj.weight": "model-00171-of-00191.safetensors",
+     "model.layers.112.mlp.gate_proj.weight": "model-00170-of-00191.safetensors",
+     "model.layers.112.mlp.up_proj.weight": "model-00170-of-00191.safetensors",
+     "model.layers.112.post_attention_layernorm.weight": "model-00171-of-00191.safetensors",
+     "model.layers.112.self_attn.k_proj.weight": "model-00169-of-00191.safetensors",
+     "model.layers.112.self_attn.o_proj.weight": "model-00169-of-00191.safetensors",
+     "model.layers.112.self_attn.q_proj.weight": "model-00169-of-00191.safetensors",
+     "model.layers.112.self_attn.v_proj.weight": "model-00169-of-00191.safetensors",
+     "model.layers.113.input_layernorm.weight": "model-00172-of-00191.safetensors",
+     "model.layers.113.mlp.down_proj.weight": "model-00172-of-00191.safetensors",
+     "model.layers.113.mlp.gate_proj.weight": "model-00171-of-00191.safetensors",
+     "model.layers.113.mlp.up_proj.weight": "model-00172-of-00191.safetensors",
+     "model.layers.113.post_attention_layernorm.weight": "model-00172-of-00191.safetensors",
+     "model.layers.113.self_attn.k_proj.weight": "model-00171-of-00191.safetensors",
+     "model.layers.113.self_attn.o_proj.weight": "model-00171-of-00191.safetensors",
+     "model.layers.113.self_attn.q_proj.weight": "model-00171-of-00191.safetensors",
+     "model.layers.113.self_attn.v_proj.weight": "model-00171-of-00191.safetensors",
+     "model.layers.114.input_layernorm.weight": "model-00174-of-00191.safetensors",
+     "model.layers.114.mlp.down_proj.weight": "model-00174-of-00191.safetensors",
+     "model.layers.114.mlp.gate_proj.weight": "model-00173-of-00191.safetensors",
+     "model.layers.114.mlp.up_proj.weight": "model-00173-of-00191.safetensors",
+     "model.layers.114.post_attention_layernorm.weight": "model-00174-of-00191.safetensors",
+     "model.layers.114.self_attn.k_proj.weight": "model-00172-of-00191.safetensors",
+     "model.layers.114.self_attn.o_proj.weight": "model-00172-of-00191.safetensors",
+     "model.layers.114.self_attn.q_proj.weight": "model-00172-of-00191.safetensors",
+     "model.layers.114.self_attn.v_proj.weight": "model-00172-of-00191.safetensors",
+     "model.layers.115.input_layernorm.weight": "model-00175-of-00191.safetensors",
+     "model.layers.115.mlp.down_proj.weight": "model-00175-of-00191.safetensors",
+     "model.layers.115.mlp.gate_proj.weight": "model-00174-of-00191.safetensors",
+     "model.layers.115.mlp.up_proj.weight": "model-00175-of-00191.safetensors",
+     "model.layers.115.post_attention_layernorm.weight": "model-00175-of-00191.safetensors",
+     "model.layers.115.self_attn.k_proj.weight": "model-00174-of-00191.safetensors",
+     "model.layers.115.self_attn.o_proj.weight": "model-00174-of-00191.safetensors",
+     "model.layers.115.self_attn.q_proj.weight": "model-00174-of-00191.safetensors",
+     "model.layers.115.self_attn.v_proj.weight": "model-00174-of-00191.safetensors",
+     "model.layers.116.input_layernorm.weight": "model-00177-of-00191.safetensors",
+     "model.layers.116.mlp.down_proj.weight": "model-00177-of-00191.safetensors",
+     "model.layers.116.mlp.gate_proj.weight": "model-00176-of-00191.safetensors",
+     "model.layers.116.mlp.up_proj.weight": "model-00176-of-00191.safetensors",
+     "model.layers.116.post_attention_layernorm.weight": "model-00177-of-00191.safetensors",
+     "model.layers.116.self_attn.k_proj.weight": "model-00175-of-00191.safetensors",
+     "model.layers.116.self_attn.o_proj.weight": "model-00175-of-00191.safetensors",
+     "model.layers.116.self_attn.q_proj.weight": "model-00175-of-00191.safetensors",
+     "model.layers.116.self_attn.v_proj.weight": "model-00175-of-00191.safetensors",
+     "model.layers.117.input_layernorm.weight": "model-00178-of-00191.safetensors",
+     "model.layers.117.mlp.down_proj.weight": "model-00178-of-00191.safetensors",
+     "model.layers.117.mlp.gate_proj.weight": "model-00177-of-00191.safetensors",
+     "model.layers.117.mlp.up_proj.weight": "model-00178-of-00191.safetensors",
+     "model.layers.117.post_attention_layernorm.weight": "model-00178-of-00191.safetensors",
+     "model.layers.117.self_attn.k_proj.weight": "model-00177-of-00191.safetensors",
+     "model.layers.117.self_attn.o_proj.weight": "model-00177-of-00191.safetensors",
+     "model.layers.117.self_attn.q_proj.weight": "model-00177-of-00191.safetensors",
+     "model.layers.117.self_attn.v_proj.weight": "model-00177-of-00191.safetensors",
+     "model.layers.118.input_layernorm.weight": "model-00180-of-00191.safetensors",
+     "model.layers.118.mlp.down_proj.weight": "model-00180-of-00191.safetensors",
+     "model.layers.118.mlp.gate_proj.weight": "model-00179-of-00191.safetensors",
+     "model.layers.118.mlp.up_proj.weight": "model-00179-of-00191.safetensors",
+     "model.layers.118.post_attention_layernorm.weight": "model-00180-of-00191.safetensors",
+     "model.layers.118.self_attn.k_proj.weight": "model-00178-of-00191.safetensors",
+     "model.layers.118.self_attn.o_proj.weight": "model-00178-of-00191.safetensors",
+     "model.layers.118.self_attn.q_proj.weight": "model-00178-of-00191.safetensors",
+     "model.layers.118.self_attn.v_proj.weight": "model-00178-of-00191.safetensors",
+     "model.layers.119.input_layernorm.weight": "model-00181-of-00191.safetensors",
+     "model.layers.119.mlp.down_proj.weight": "model-00181-of-00191.safetensors",
+     "model.layers.119.mlp.gate_proj.weight": "model-00180-of-00191.safetensors",
+     "model.layers.119.mlp.up_proj.weight": "model-00181-of-00191.safetensors",
+     "model.layers.119.post_attention_layernorm.weight": "model-00181-of-00191.safetensors",
+     "model.layers.119.self_attn.k_proj.weight": "model-00180-of-00191.safetensors",
+     "model.layers.119.self_attn.o_proj.weight": "model-00180-of-00191.safetensors",
+     "model.layers.119.self_attn.q_proj.weight": "model-00180-of-00191.safetensors",
+     "model.layers.119.self_attn.v_proj.weight": "model-00180-of-00191.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00021-of-00191.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00021-of-00191.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00020-of-00191.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00020-of-00191.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00021-of-00191.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00019-of-00191.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00019-of-00191.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00019-of-00191.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00019-of-00191.safetensors",
+     "model.layers.120.input_layernorm.weight": "model-00183-of-00191.safetensors",
+     "model.layers.120.mlp.down_proj.weight": "model-00183-of-00191.safetensors",
+     "model.layers.120.mlp.gate_proj.weight": "model-00182-of-00191.safetensors",
+     "model.layers.120.mlp.up_proj.weight": "model-00182-of-00191.safetensors",
+     "model.layers.120.post_attention_layernorm.weight": "model-00183-of-00191.safetensors",
+     "model.layers.120.self_attn.k_proj.weight": "model-00181-of-00191.safetensors",
+     "model.layers.120.self_attn.o_proj.weight": "model-00181-of-00191.safetensors",
+     "model.layers.120.self_attn.q_proj.weight": "model-00181-of-00191.safetensors",
+     "model.layers.120.self_attn.v_proj.weight": "model-00181-of-00191.safetensors",
+     "model.layers.121.input_layernorm.weight": "model-00184-of-00191.safetensors",
+     "model.layers.121.mlp.down_proj.weight": "model-00184-of-00191.safetensors",
+     "model.layers.121.mlp.gate_proj.weight": "model-00183-of-00191.safetensors",
+     "model.layers.121.mlp.up_proj.weight": "model-00184-of-00191.safetensors",
+     "model.layers.121.post_attention_layernorm.weight": "model-00184-of-00191.safetensors",
+     "model.layers.121.self_attn.k_proj.weight": "model-00183-of-00191.safetensors",
+     "model.layers.121.self_attn.o_proj.weight": "model-00183-of-00191.safetensors",
+     "model.layers.121.self_attn.q_proj.weight": "model-00183-of-00191.safetensors",
+     "model.layers.121.self_attn.v_proj.weight": "model-00183-of-00191.safetensors",
+     "model.layers.122.input_layernorm.weight": "model-00186-of-00191.safetensors",
+     "model.layers.122.mlp.down_proj.weight": "model-00186-of-00191.safetensors",
+     "model.layers.122.mlp.gate_proj.weight": "model-00185-of-00191.safetensors",
+     "model.layers.122.mlp.up_proj.weight": "model-00185-of-00191.safetensors",
+     "model.layers.122.post_attention_layernorm.weight": "model-00186-of-00191.safetensors",
+     "model.layers.122.self_attn.k_proj.weight": "model-00184-of-00191.safetensors",
+     "model.layers.122.self_attn.o_proj.weight": "model-00184-of-00191.safetensors",
+     "model.layers.122.self_attn.q_proj.weight": "model-00184-of-00191.safetensors",
+     "model.layers.122.self_attn.v_proj.weight": "model-00184-of-00191.safetensors",
+     "model.layers.123.input_layernorm.weight": "model-00187-of-00191.safetensors",
+     "model.layers.123.mlp.down_proj.weight": "model-00187-of-00191.safetensors",
+     "model.layers.123.mlp.gate_proj.weight": "model-00186-of-00191.safetensors",
+     "model.layers.123.mlp.up_proj.weight": "model-00187-of-00191.safetensors",
+     "model.layers.123.post_attention_layernorm.weight": "model-00187-of-00191.safetensors",
+     "model.layers.123.self_attn.k_proj.weight": "model-00186-of-00191.safetensors",
+     "model.layers.123.self_attn.o_proj.weight": "model-00186-of-00191.safetensors",
+     "model.layers.123.self_attn.q_proj.weight": "model-00186-of-00191.safetensors",
+     "model.layers.123.self_attn.v_proj.weight": "model-00186-of-00191.safetensors",
+     "model.layers.124.input_layernorm.weight": "model-00189-of-00191.safetensors",
+     "model.layers.124.mlp.down_proj.weight": "model-00189-of-00191.safetensors",
+     "model.layers.124.mlp.gate_proj.weight": "model-00188-of-00191.safetensors",
+     "model.layers.124.mlp.up_proj.weight": "model-00188-of-00191.safetensors",
+     "model.layers.124.post_attention_layernorm.weight": "model-00189-of-00191.safetensors",
+     "model.layers.124.self_attn.k_proj.weight": "model-00187-of-00191.safetensors",
+     "model.layers.124.self_attn.o_proj.weight": "model-00187-of-00191.safetensors",
+     "model.layers.124.self_attn.q_proj.weight": "model-00187-of-00191.safetensors",
+     "model.layers.124.self_attn.v_proj.weight": "model-00187-of-00191.safetensors",
+     "model.layers.125.input_layernorm.weight": "model-00190-of-00191.safetensors",
+     "model.layers.125.mlp.down_proj.weight": "model-00190-of-00191.safetensors",
+     "model.layers.125.mlp.gate_proj.weight": "model-00189-of-00191.safetensors",
+     "model.layers.125.mlp.up_proj.weight": "model-00190-of-00191.safetensors",
+     "model.layers.125.post_attention_layernorm.weight": "model-00190-of-00191.safetensors",
+     "model.layers.125.self_attn.k_proj.weight": "model-00189-of-00191.safetensors",
+     "model.layers.125.self_attn.o_proj.weight": "model-00189-of-00191.safetensors",
+     "model.layers.125.self_attn.q_proj.weight": "model-00189-of-00191.safetensors",
+     "model.layers.125.self_attn.v_proj.weight": "model-00189-of-00191.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00022-of-00191.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00022-of-00191.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00021-of-00191.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00022-of-00191.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00022-of-00191.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00021-of-00191.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00021-of-00191.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00021-of-00191.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00021-of-00191.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00024-of-00191.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00024-of-00191.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00023-of-00191.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00023-of-00191.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00024-of-00191.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00022-of-00191.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00022-of-00191.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00022-of-00191.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00022-of-00191.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00025-of-00191.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00025-of-00191.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00024-of-00191.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00025-of-00191.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00025-of-00191.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00024-of-00191.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00024-of-00191.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00024-of-00191.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00024-of-00191.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00027-of-00191.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00027-of-00191.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00026-of-00191.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00026-of-00191.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00027-of-00191.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00025-of-00191.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00025-of-00191.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00025-of-00191.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00025-of-00191.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00028-of-00191.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00028-of-00191.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00027-of-00191.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00028-of-00191.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00028-of-00191.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00027-of-00191.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00027-of-00191.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00027-of-00191.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00027-of-00191.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00030-of-00191.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00030-of-00191.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00029-of-00191.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00029-of-00191.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00030-of-00191.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00028-of-00191.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00028-of-00191.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00028-of-00191.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00028-of-00191.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00031-of-00191.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00031-of-00191.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00030-of-00191.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00031-of-00191.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00031-of-00191.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00030-of-00191.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00030-of-00191.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00030-of-00191.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00030-of-00191.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00006-of-00191.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00006-of-00191.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00005-of-00191.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00005-of-00191.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00006-of-00191.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00004-of-00191.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00004-of-00191.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00004-of-00191.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00004-of-00191.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00033-of-00191.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00033-of-00191.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00032-of-00191.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00032-of-00191.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00033-of-00191.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00031-of-00191.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00031-of-00191.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00031-of-00191.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00031-of-00191.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00034-of-00191.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00034-of-00191.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00033-of-00191.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00034-of-00191.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00034-of-00191.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00033-of-00191.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00033-of-00191.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00033-of-00191.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00033-of-00191.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00036-of-00191.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00036-of-00191.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00035-of-00191.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00035-of-00191.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00036-of-00191.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00034-of-00191.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00034-of-00191.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00034-of-00191.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00034-of-00191.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00037-of-00191.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00037-of-00191.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00036-of-00191.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00037-of-00191.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00037-of-00191.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00036-of-00191.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00036-of-00191.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00036-of-00191.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00036-of-00191.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00039-of-00191.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00039-of-00191.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00038-of-00191.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00038-of-00191.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00039-of-00191.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00037-of-00191.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00037-of-00191.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00037-of-00191.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00037-of-00191.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00040-of-00191.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00040-of-00191.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00039-of-00191.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00040-of-00191.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00040-of-00191.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00039-of-00191.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00039-of-00191.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00039-of-00191.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00039-of-00191.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00042-of-00191.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00042-of-00191.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00041-of-00191.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00041-of-00191.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00042-of-00191.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00040-of-00191.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00040-of-00191.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00040-of-00191.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00040-of-00191.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00043-of-00191.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00043-of-00191.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00042-of-00191.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00043-of-00191.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00043-of-00191.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00042-of-00191.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00042-of-00191.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00042-of-00191.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00042-of-00191.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00045-of-00191.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00045-of-00191.safetensors",
+     "model.layers.28.mlp.gate_proj.weight": "model-00044-of-00191.safetensors",
+     "model.layers.28.mlp.up_proj.weight": "model-00044-of-00191.safetensors",
+     "model.layers.28.post_attention_layernorm.weight": "model-00045-of-00191.safetensors",
+     "model.layers.28.self_attn.k_proj.weight": "model-00043-of-00191.safetensors",
+     "model.layers.28.self_attn.o_proj.weight": "model-00043-of-00191.safetensors",
+     "model.layers.28.self_attn.q_proj.weight": "model-00043-of-00191.safetensors",
+     "model.layers.28.self_attn.v_proj.weight": "model-00043-of-00191.safetensors",
+     "model.layers.29.input_layernorm.weight": "model-00046-of-00191.safetensors",
+     "model.layers.29.mlp.down_proj.weight": "model-00046-of-00191.safetensors",
+     "model.layers.29.mlp.gate_proj.weight": "model-00045-of-00191.safetensors",
+     "model.layers.29.mlp.up_proj.weight": "model-00046-of-00191.safetensors",
+     "model.layers.29.post_attention_layernorm.weight": "model-00046-of-00191.safetensors",
+     "model.layers.29.self_attn.k_proj.weight": "model-00045-of-00191.safetensors",
+     "model.layers.29.self_attn.o_proj.weight": "model-00045-of-00191.safetensors",
+     "model.layers.29.self_attn.q_proj.weight": "model-00045-of-00191.safetensors",
+     "model.layers.29.self_attn.v_proj.weight": "model-00045-of-00191.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00007-of-00191.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00007-of-00191.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00006-of-00191.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00007-of-00191.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00007-of-00191.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00006-of-00191.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00006-of-00191.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00006-of-00191.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00006-of-00191.safetensors",
+     "model.layers.30.input_layernorm.weight": "model-00048-of-00191.safetensors",
+     "model.layers.30.mlp.down_proj.weight": "model-00048-of-00191.safetensors",
+     "model.layers.30.mlp.gate_proj.weight": "model-00047-of-00191.safetensors",
+     "model.layers.30.mlp.up_proj.weight": "model-00047-of-00191.safetensors",
+     "model.layers.30.post_attention_layernorm.weight": "model-00048-of-00191.safetensors",
+     "model.layers.30.self_attn.k_proj.weight": "model-00046-of-00191.safetensors",
+     "model.layers.30.self_attn.o_proj.weight": "model-00046-of-00191.safetensors",
+     "model.layers.30.self_attn.q_proj.weight": "model-00046-of-00191.safetensors",
+     "model.layers.30.self_attn.v_proj.weight": "model-00046-of-00191.safetensors",
+     "model.layers.31.input_layernorm.weight": "model-00049-of-00191.safetensors",
+     "model.layers.31.mlp.down_proj.weight": "model-00049-of-00191.safetensors",
+     "model.layers.31.mlp.gate_proj.weight": "model-00048-of-00191.safetensors",
+     "model.layers.31.mlp.up_proj.weight": "model-00049-of-00191.safetensors",
+     "model.layers.31.post_attention_layernorm.weight": "model-00049-of-00191.safetensors",
+     "model.layers.31.self_attn.k_proj.weight": "model-00048-of-00191.safetensors",
+     "model.layers.31.self_attn.o_proj.weight": "model-00048-of-00191.safetensors",
+     "model.layers.31.self_attn.q_proj.weight": "model-00048-of-00191.safetensors",
+     "model.layers.31.self_attn.v_proj.weight": "model-00048-of-00191.safetensors",
+     "model.layers.32.input_layernorm.weight": "model-00051-of-00191.safetensors",
+     "model.layers.32.mlp.down_proj.weight": "model-00051-of-00191.safetensors",
+     "model.layers.32.mlp.gate_proj.weight": "model-00050-of-00191.safetensors",
+     "model.layers.32.mlp.up_proj.weight": "model-00050-of-00191.safetensors",
+     "model.layers.32.post_attention_layernorm.weight": "model-00051-of-00191.safetensors",
+     "model.layers.32.self_attn.k_proj.weight": "model-00049-of-00191.safetensors",
+     "model.layers.32.self_attn.o_proj.weight": "model-00049-of-00191.safetensors",
+     "model.layers.32.self_attn.q_proj.weight": "model-00049-of-00191.safetensors",
+     "model.layers.32.self_attn.v_proj.weight": "model-00049-of-00191.safetensors",
+     "model.layers.33.input_layernorm.weight": "model-00052-of-00191.safetensors",
+     "model.layers.33.mlp.down_proj.weight": "model-00052-of-00191.safetensors",
+     "model.layers.33.mlp.gate_proj.weight": "model-00051-of-00191.safetensors",
+     "model.layers.33.mlp.up_proj.weight": "model-00052-of-00191.safetensors",
+     "model.layers.33.post_attention_layernorm.weight": "model-00052-of-00191.safetensors",
+     "model.layers.33.self_attn.k_proj.weight": "model-00051-of-00191.safetensors",
+     "model.layers.33.self_attn.o_proj.weight": "model-00051-of-00191.safetensors",
+     "model.layers.33.self_attn.q_proj.weight": "model-00051-of-00191.safetensors",
+     "model.layers.33.self_attn.v_proj.weight": "model-00051-of-00191.safetensors",
+     "model.layers.34.input_layernorm.weight": "model-00054-of-00191.safetensors",
+     "model.layers.34.mlp.down_proj.weight": "model-00054-of-00191.safetensors",
+     "model.layers.34.mlp.gate_proj.weight": "model-00053-of-00191.safetensors",
+     "model.layers.34.mlp.up_proj.weight": "model-00053-of-00191.safetensors",
+     "model.layers.34.post_attention_layernorm.weight": "model-00054-of-00191.safetensors",
+     "model.layers.34.self_attn.k_proj.weight": "model-00052-of-00191.safetensors",
+     "model.layers.34.self_attn.o_proj.weight": "model-00052-of-00191.safetensors",
+     "model.layers.34.self_attn.q_proj.weight": "model-00052-of-00191.safetensors",
+     "model.layers.34.self_attn.v_proj.weight": "model-00052-of-00191.safetensors",
+     "model.layers.35.input_layernorm.weight": "model-00055-of-00191.safetensors",
+     "model.layers.35.mlp.down_proj.weight": "model-00055-of-00191.safetensors",
+     "model.layers.35.mlp.gate_proj.weight": "model-00054-of-00191.safetensors",
+     "model.layers.35.mlp.up_proj.weight": "model-00055-of-00191.safetensors",
+     "model.layers.35.post_attention_layernorm.weight": "model-00055-of-00191.safetensors",
+     "model.layers.35.self_attn.k_proj.weight": "model-00054-of-00191.safetensors",
+     "model.layers.35.self_attn.o_proj.weight": "model-00054-of-00191.safetensors",
+     "model.layers.35.self_attn.q_proj.weight": "model-00054-of-00191.safetensors",
+     "model.layers.35.self_attn.v_proj.weight": "model-00054-of-00191.safetensors",
+     "model.layers.36.input_layernorm.weight": "model-00057-of-00191.safetensors",
+     "model.layers.36.mlp.down_proj.weight": "model-00057-of-00191.safetensors",
+     "model.layers.36.mlp.gate_proj.weight": "model-00056-of-00191.safetensors",
+     "model.layers.36.mlp.up_proj.weight": "model-00056-of-00191.safetensors",
+     "model.layers.36.post_attention_layernorm.weight": "model-00057-of-00191.safetensors",
+     "model.layers.36.self_attn.k_proj.weight": "model-00055-of-00191.safetensors",
+     "model.layers.36.self_attn.o_proj.weight": "model-00055-of-00191.safetensors",
+     "model.layers.36.self_attn.q_proj.weight": "model-00055-of-00191.safetensors",
+     "model.layers.36.self_attn.v_proj.weight": "model-00055-of-00191.safetensors",
+     "model.layers.37.input_layernorm.weight": "model-00058-of-00191.safetensors",
+     "model.layers.37.mlp.down_proj.weight": "model-00058-of-00191.safetensors",
+     "model.layers.37.mlp.gate_proj.weight": "model-00057-of-00191.safetensors",
+     "model.layers.37.mlp.up_proj.weight": "model-00058-of-00191.safetensors",
+     "model.layers.37.post_attention_layernorm.weight": "model-00058-of-00191.safetensors",
+     "model.layers.37.self_attn.k_proj.weight": "model-00057-of-00191.safetensors",
+     "model.layers.37.self_attn.o_proj.weight": "model-00057-of-00191.safetensors",
+     "model.layers.37.self_attn.q_proj.weight": "model-00057-of-00191.safetensors",
+     "model.layers.37.self_attn.v_proj.weight": "model-00057-of-00191.safetensors",
+     "model.layers.38.input_layernorm.weight": "model-00060-of-00191.safetensors",
+     "model.layers.38.mlp.down_proj.weight": "model-00060-of-00191.safetensors",
+     "model.layers.38.mlp.gate_proj.weight": "model-00059-of-00191.safetensors",
+     "model.layers.38.mlp.up_proj.weight": "model-00059-of-00191.safetensors",
+     "model.layers.38.post_attention_layernorm.weight": "model-00060-of-00191.safetensors",
+     "model.layers.38.self_attn.k_proj.weight": "model-00058-of-00191.safetensors",
+     "model.layers.38.self_attn.o_proj.weight": "model-00058-of-00191.safetensors",
+     "model.layers.38.self_attn.q_proj.weight": "model-00058-of-00191.safetensors",
+     "model.layers.38.self_attn.v_proj.weight": "model-00058-of-00191.safetensors",
+     "model.layers.39.input_layernorm.weight": "model-00061-of-00191.safetensors",
+     "model.layers.39.mlp.down_proj.weight": "model-00061-of-00191.safetensors",
+     "model.layers.39.mlp.gate_proj.weight": "model-00060-of-00191.safetensors",
+     "model.layers.39.mlp.up_proj.weight": "model-00061-of-00191.safetensors",
+     "model.layers.39.post_attention_layernorm.weight": "model-00061-of-00191.safetensors",
+     "model.layers.39.self_attn.k_proj.weight": "model-00060-of-00191.safetensors",
+     "model.layers.39.self_attn.o_proj.weight": "model-00060-of-00191.safetensors",
+     "model.layers.39.self_attn.q_proj.weight": "model-00060-of-00191.safetensors",
+     "model.layers.39.self_attn.v_proj.weight": "model-00060-of-00191.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00009-of-00191.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00009-of-00191.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00008-of-00191.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00008-of-00191.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00009-of-00191.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00007-of-00191.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00007-of-00191.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00007-of-00191.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00007-of-00191.safetensors",
+     "model.layers.40.input_layernorm.weight": "model-00063-of-00191.safetensors",
+     "model.layers.40.mlp.down_proj.weight": "model-00063-of-00191.safetensors",
+     "model.layers.40.mlp.gate_proj.weight": "model-00062-of-00191.safetensors",
+     "model.layers.40.mlp.up_proj.weight": "model-00062-of-00191.safetensors",
+     "model.layers.40.post_attention_layernorm.weight": "model-00063-of-00191.safetensors",
+     "model.layers.40.self_attn.k_proj.weight": "model-00061-of-00191.safetensors",
+     "model.layers.40.self_attn.o_proj.weight": "model-00061-of-00191.safetensors",
+     "model.layers.40.self_attn.q_proj.weight": "model-00061-of-00191.safetensors",
+     "model.layers.40.self_attn.v_proj.weight": "model-00061-of-00191.safetensors",
+     "model.layers.41.input_layernorm.weight": "model-00064-of-00191.safetensors",
+     "model.layers.41.mlp.down_proj.weight": "model-00064-of-00191.safetensors",
+     "model.layers.41.mlp.gate_proj.weight": "model-00063-of-00191.safetensors",
+     "model.layers.41.mlp.up_proj.weight": "model-00064-of-00191.safetensors",
+     "model.layers.41.post_attention_layernorm.weight": "model-00064-of-00191.safetensors",
+     "model.layers.41.self_attn.k_proj.weight": "model-00063-of-00191.safetensors",
+     "model.layers.41.self_attn.o_proj.weight": "model-00063-of-00191.safetensors",
+     "model.layers.41.self_attn.q_proj.weight": "model-00063-of-00191.safetensors",
+     "model.layers.41.self_attn.v_proj.weight": "model-00063-of-00191.safetensors",
+     "model.layers.42.input_layernorm.weight": "model-00066-of-00191.safetensors",
+     "model.layers.42.mlp.down_proj.weight": "model-00066-of-00191.safetensors",
+     "model.layers.42.mlp.gate_proj.weight": "model-00065-of-00191.safetensors",
+     "model.layers.42.mlp.up_proj.weight": "model-00065-of-00191.safetensors",
+     "model.layers.42.post_attention_layernorm.weight": "model-00066-of-00191.safetensors",
+     "model.layers.42.self_attn.k_proj.weight": "model-00064-of-00191.safetensors",
+     "model.layers.42.self_attn.o_proj.weight": "model-00064-of-00191.safetensors",
+     "model.layers.42.self_attn.q_proj.weight": "model-00064-of-00191.safetensors",
+     "model.layers.42.self_attn.v_proj.weight": "model-00064-of-00191.safetensors",
+     "model.layers.43.input_layernorm.weight": "model-00067-of-00191.safetensors",
+     "model.layers.43.mlp.down_proj.weight": "model-00067-of-00191.safetensors",
+     "model.layers.43.mlp.gate_proj.weight": "model-00066-of-00191.safetensors",
+     "model.layers.43.mlp.up_proj.weight": "model-00067-of-00191.safetensors",
+     "model.layers.43.post_attention_layernorm.weight": "model-00067-of-00191.safetensors",
+     "model.layers.43.self_attn.k_proj.weight": "model-00066-of-00191.safetensors",
+     "model.layers.43.self_attn.o_proj.weight": "model-00066-of-00191.safetensors",
+     "model.layers.43.self_attn.q_proj.weight": "model-00066-of-00191.safetensors",
+     "model.layers.43.self_attn.v_proj.weight": "model-00066-of-00191.safetensors",
+     "model.layers.44.input_layernorm.weight": "model-00069-of-00191.safetensors",
+     "model.layers.44.mlp.down_proj.weight": "model-00069-of-00191.safetensors",
+     "model.layers.44.mlp.gate_proj.weight": "model-00068-of-00191.safetensors",
+     "model.layers.44.mlp.up_proj.weight": "model-00068-of-00191.safetensors",
+     "model.layers.44.post_attention_layernorm.weight": "model-00069-of-00191.safetensors",
+     "model.layers.44.self_attn.k_proj.weight": "model-00067-of-00191.safetensors",
+     "model.layers.44.self_attn.o_proj.weight": "model-00067-of-00191.safetensors",
+     "model.layers.44.self_attn.q_proj.weight": "model-00067-of-00191.safetensors",
+     "model.layers.44.self_attn.v_proj.weight": "model-00067-of-00191.safetensors",
+     "model.layers.45.input_layernorm.weight": "model-00070-of-00191.safetensors",
+     "model.layers.45.mlp.down_proj.weight": "model-00070-of-00191.safetensors",
+     "model.layers.45.mlp.gate_proj.weight": "model-00069-of-00191.safetensors",
+     "model.layers.45.mlp.up_proj.weight": "model-00070-of-00191.safetensors",
+     "model.layers.45.post_attention_layernorm.weight": "model-00070-of-00191.safetensors",
+     "model.layers.45.self_attn.k_proj.weight": "model-00069-of-00191.safetensors",
+     "model.layers.45.self_attn.o_proj.weight": "model-00069-of-00191.safetensors",
+     "model.layers.45.self_attn.q_proj.weight": "model-00069-of-00191.safetensors",
+     "model.layers.45.self_attn.v_proj.weight": "model-00069-of-00191.safetensors",
+     "model.layers.46.input_layernorm.weight": "model-00072-of-00191.safetensors",
+     "model.layers.46.mlp.down_proj.weight": "model-00072-of-00191.safetensors",
+     "model.layers.46.mlp.gate_proj.weight": "model-00071-of-00191.safetensors",
+     "model.layers.46.mlp.up_proj.weight": "model-00071-of-00191.safetensors",
+     "model.layers.46.post_attention_layernorm.weight": "model-00072-of-00191.safetensors",
616
+ "model.layers.46.self_attn.k_proj.weight": "model-00070-of-00191.safetensors",
617
+ "model.layers.46.self_attn.o_proj.weight": "model-00070-of-00191.safetensors",
618
+ "model.layers.46.self_attn.q_proj.weight": "model-00070-of-00191.safetensors",
619
+ "model.layers.46.self_attn.v_proj.weight": "model-00070-of-00191.safetensors",
620
+ "model.layers.47.input_layernorm.weight": "model-00073-of-00191.safetensors",
621
+ "model.layers.47.mlp.down_proj.weight": "model-00073-of-00191.safetensors",
622
+ "model.layers.47.mlp.gate_proj.weight": "model-00072-of-00191.safetensors",
623
+ "model.layers.47.mlp.up_proj.weight": "model-00073-of-00191.safetensors",
624
+ "model.layers.47.post_attention_layernorm.weight": "model-00073-of-00191.safetensors",
625
+ "model.layers.47.self_attn.k_proj.weight": "model-00072-of-00191.safetensors",
626
+ "model.layers.47.self_attn.o_proj.weight": "model-00072-of-00191.safetensors",
627
+ "model.layers.47.self_attn.q_proj.weight": "model-00072-of-00191.safetensors",
628
+ "model.layers.47.self_attn.v_proj.weight": "model-00072-of-00191.safetensors",
629
+ "model.layers.48.input_layernorm.weight": "model-00075-of-00191.safetensors",
630
+ "model.layers.48.mlp.down_proj.weight": "model-00075-of-00191.safetensors",
631
+ "model.layers.48.mlp.gate_proj.weight": "model-00074-of-00191.safetensors",
632
+ "model.layers.48.mlp.up_proj.weight": "model-00074-of-00191.safetensors",
633
+ "model.layers.48.post_attention_layernorm.weight": "model-00075-of-00191.safetensors",
634
+ "model.layers.48.self_attn.k_proj.weight": "model-00073-of-00191.safetensors",
635
+ "model.layers.48.self_attn.o_proj.weight": "model-00073-of-00191.safetensors",
636
+ "model.layers.48.self_attn.q_proj.weight": "model-00073-of-00191.safetensors",
637
+ "model.layers.48.self_attn.v_proj.weight": "model-00073-of-00191.safetensors",
638
+ "model.layers.49.input_layernorm.weight": "model-00076-of-00191.safetensors",
639
+ "model.layers.49.mlp.down_proj.weight": "model-00076-of-00191.safetensors",
640
+ "model.layers.49.mlp.gate_proj.weight": "model-00075-of-00191.safetensors",
641
+ "model.layers.49.mlp.up_proj.weight": "model-00076-of-00191.safetensors",
642
+ "model.layers.49.post_attention_layernorm.weight": "model-00076-of-00191.safetensors",
643
+ "model.layers.49.self_attn.k_proj.weight": "model-00075-of-00191.safetensors",
644
+ "model.layers.49.self_attn.o_proj.weight": "model-00075-of-00191.safetensors",
645
+ "model.layers.49.self_attn.q_proj.weight": "model-00075-of-00191.safetensors",
646
+ "model.layers.49.self_attn.v_proj.weight": "model-00075-of-00191.safetensors",
647
+ "model.layers.5.input_layernorm.weight": "model-00010-of-00191.safetensors",
648
+ "model.layers.5.mlp.down_proj.weight": "model-00010-of-00191.safetensors",
649
+ "model.layers.5.mlp.gate_proj.weight": "model-00009-of-00191.safetensors",
650
+ "model.layers.5.mlp.up_proj.weight": "model-00010-of-00191.safetensors",
651
+ "model.layers.5.post_attention_layernorm.weight": "model-00010-of-00191.safetensors",
652
+ "model.layers.5.self_attn.k_proj.weight": "model-00009-of-00191.safetensors",
653
+ "model.layers.5.self_attn.o_proj.weight": "model-00009-of-00191.safetensors",
654
+ "model.layers.5.self_attn.q_proj.weight": "model-00009-of-00191.safetensors",
655
+ "model.layers.5.self_attn.v_proj.weight": "model-00009-of-00191.safetensors",
656
+ "model.layers.50.input_layernorm.weight": "model-00078-of-00191.safetensors",
657
+ "model.layers.50.mlp.down_proj.weight": "model-00078-of-00191.safetensors",
658
+ "model.layers.50.mlp.gate_proj.weight": "model-00077-of-00191.safetensors",
659
+ "model.layers.50.mlp.up_proj.weight": "model-00077-of-00191.safetensors",
660
+ "model.layers.50.post_attention_layernorm.weight": "model-00078-of-00191.safetensors",
661
+ "model.layers.50.self_attn.k_proj.weight": "model-00076-of-00191.safetensors",
662
+ "model.layers.50.self_attn.o_proj.weight": "model-00076-of-00191.safetensors",
663
+ "model.layers.50.self_attn.q_proj.weight": "model-00076-of-00191.safetensors",
664
+ "model.layers.50.self_attn.v_proj.weight": "model-00076-of-00191.safetensors",
665
+ "model.layers.51.input_layernorm.weight": "model-00079-of-00191.safetensors",
666
+ "model.layers.51.mlp.down_proj.weight": "model-00079-of-00191.safetensors",
667
+ "model.layers.51.mlp.gate_proj.weight": "model-00078-of-00191.safetensors",
668
+ "model.layers.51.mlp.up_proj.weight": "model-00079-of-00191.safetensors",
669
+ "model.layers.51.post_attention_layernorm.weight": "model-00079-of-00191.safetensors",
670
+ "model.layers.51.self_attn.k_proj.weight": "model-00078-of-00191.safetensors",
671
+ "model.layers.51.self_attn.o_proj.weight": "model-00078-of-00191.safetensors",
672
+ "model.layers.51.self_attn.q_proj.weight": "model-00078-of-00191.safetensors",
673
+ "model.layers.51.self_attn.v_proj.weight": "model-00078-of-00191.safetensors",
674
+ "model.layers.52.input_layernorm.weight": "model-00081-of-00191.safetensors",
675
+ "model.layers.52.mlp.down_proj.weight": "model-00081-of-00191.safetensors",
676
+ "model.layers.52.mlp.gate_proj.weight": "model-00080-of-00191.safetensors",
677
+ "model.layers.52.mlp.up_proj.weight": "model-00080-of-00191.safetensors",
678
+ "model.layers.52.post_attention_layernorm.weight": "model-00081-of-00191.safetensors",
679
+ "model.layers.52.self_attn.k_proj.weight": "model-00079-of-00191.safetensors",
680
+ "model.layers.52.self_attn.o_proj.weight": "model-00079-of-00191.safetensors",
681
+ "model.layers.52.self_attn.q_proj.weight": "model-00079-of-00191.safetensors",
682
+ "model.layers.52.self_attn.v_proj.weight": "model-00079-of-00191.safetensors",
683
+ "model.layers.53.input_layernorm.weight": "model-00082-of-00191.safetensors",
684
+ "model.layers.53.mlp.down_proj.weight": "model-00082-of-00191.safetensors",
685
+ "model.layers.53.mlp.gate_proj.weight": "model-00081-of-00191.safetensors",
686
+ "model.layers.53.mlp.up_proj.weight": "model-00082-of-00191.safetensors",
687
+ "model.layers.53.post_attention_layernorm.weight": "model-00082-of-00191.safetensors",
688
+ "model.layers.53.self_attn.k_proj.weight": "model-00081-of-00191.safetensors",
689
+ "model.layers.53.self_attn.o_proj.weight": "model-00081-of-00191.safetensors",
690
+ "model.layers.53.self_attn.q_proj.weight": "model-00081-of-00191.safetensors",
691
+ "model.layers.53.self_attn.v_proj.weight": "model-00081-of-00191.safetensors",
692
+ "model.layers.54.input_layernorm.weight": "model-00084-of-00191.safetensors",
693
+ "model.layers.54.mlp.down_proj.weight": "model-00084-of-00191.safetensors",
694
+ "model.layers.54.mlp.gate_proj.weight": "model-00083-of-00191.safetensors",
695
+ "model.layers.54.mlp.up_proj.weight": "model-00083-of-00191.safetensors",
696
+ "model.layers.54.post_attention_layernorm.weight": "model-00084-of-00191.safetensors",
697
+ "model.layers.54.self_attn.k_proj.weight": "model-00082-of-00191.safetensors",
698
+ "model.layers.54.self_attn.o_proj.weight": "model-00082-of-00191.safetensors",
699
+ "model.layers.54.self_attn.q_proj.weight": "model-00082-of-00191.safetensors",
700
+ "model.layers.54.self_attn.v_proj.weight": "model-00082-of-00191.safetensors",
701
+ "model.layers.55.input_layernorm.weight": "model-00085-of-00191.safetensors",
702
+ "model.layers.55.mlp.down_proj.weight": "model-00085-of-00191.safetensors",
703
+ "model.layers.55.mlp.gate_proj.weight": "model-00084-of-00191.safetensors",
704
+ "model.layers.55.mlp.up_proj.weight": "model-00085-of-00191.safetensors",
705
+ "model.layers.55.post_attention_layernorm.weight": "model-00085-of-00191.safetensors",
706
+ "model.layers.55.self_attn.k_proj.weight": "model-00084-of-00191.safetensors",
707
+ "model.layers.55.self_attn.o_proj.weight": "model-00084-of-00191.safetensors",
708
+ "model.layers.55.self_attn.q_proj.weight": "model-00084-of-00191.safetensors",
709
+ "model.layers.55.self_attn.v_proj.weight": "model-00084-of-00191.safetensors",
710
+ "model.layers.56.input_layernorm.weight": "model-00087-of-00191.safetensors",
711
+ "model.layers.56.mlp.down_proj.weight": "model-00087-of-00191.safetensors",
712
+ "model.layers.56.mlp.gate_proj.weight": "model-00086-of-00191.safetensors",
713
+ "model.layers.56.mlp.up_proj.weight": "model-00086-of-00191.safetensors",
714
+ "model.layers.56.post_attention_layernorm.weight": "model-00087-of-00191.safetensors",
715
+ "model.layers.56.self_attn.k_proj.weight": "model-00085-of-00191.safetensors",
716
+ "model.layers.56.self_attn.o_proj.weight": "model-00085-of-00191.safetensors",
717
+ "model.layers.56.self_attn.q_proj.weight": "model-00085-of-00191.safetensors",
718
+ "model.layers.56.self_attn.v_proj.weight": "model-00085-of-00191.safetensors",
719
+ "model.layers.57.input_layernorm.weight": "model-00088-of-00191.safetensors",
720
+ "model.layers.57.mlp.down_proj.weight": "model-00088-of-00191.safetensors",
721
+ "model.layers.57.mlp.gate_proj.weight": "model-00087-of-00191.safetensors",
722
+ "model.layers.57.mlp.up_proj.weight": "model-00088-of-00191.safetensors",
723
+ "model.layers.57.post_attention_layernorm.weight": "model-00088-of-00191.safetensors",
724
+ "model.layers.57.self_attn.k_proj.weight": "model-00087-of-00191.safetensors",
725
+ "model.layers.57.self_attn.o_proj.weight": "model-00087-of-00191.safetensors",
726
+ "model.layers.57.self_attn.q_proj.weight": "model-00087-of-00191.safetensors",
727
+ "model.layers.57.self_attn.v_proj.weight": "model-00087-of-00191.safetensors",
728
+ "model.layers.58.input_layernorm.weight": "model-00090-of-00191.safetensors",
729
+ "model.layers.58.mlp.down_proj.weight": "model-00090-of-00191.safetensors",
730
+ "model.layers.58.mlp.gate_proj.weight": "model-00089-of-00191.safetensors",
731
+ "model.layers.58.mlp.up_proj.weight": "model-00089-of-00191.safetensors",
732
+ "model.layers.58.post_attention_layernorm.weight": "model-00090-of-00191.safetensors",
733
+ "model.layers.58.self_attn.k_proj.weight": "model-00088-of-00191.safetensors",
734
+ "model.layers.58.self_attn.o_proj.weight": "model-00088-of-00191.safetensors",
735
+ "model.layers.58.self_attn.q_proj.weight": "model-00088-of-00191.safetensors",
736
+ "model.layers.58.self_attn.v_proj.weight": "model-00088-of-00191.safetensors",
737
+ "model.layers.59.input_layernorm.weight": "model-00091-of-00191.safetensors",
738
+ "model.layers.59.mlp.down_proj.weight": "model-00091-of-00191.safetensors",
739
+ "model.layers.59.mlp.gate_proj.weight": "model-00090-of-00191.safetensors",
740
+ "model.layers.59.mlp.up_proj.weight": "model-00091-of-00191.safetensors",
741
+ "model.layers.59.post_attention_layernorm.weight": "model-00091-of-00191.safetensors",
742
+ "model.layers.59.self_attn.k_proj.weight": "model-00090-of-00191.safetensors",
743
+ "model.layers.59.self_attn.o_proj.weight": "model-00090-of-00191.safetensors",
744
+ "model.layers.59.self_attn.q_proj.weight": "model-00090-of-00191.safetensors",
745
+ "model.layers.59.self_attn.v_proj.weight": "model-00090-of-00191.safetensors",
746
+ "model.layers.6.input_layernorm.weight": "model-00012-of-00191.safetensors",
747
+ "model.layers.6.mlp.down_proj.weight": "model-00012-of-00191.safetensors",
748
+ "model.layers.6.mlp.gate_proj.weight": "model-00011-of-00191.safetensors",
749
+ "model.layers.6.mlp.up_proj.weight": "model-00011-of-00191.safetensors",
750
+ "model.layers.6.post_attention_layernorm.weight": "model-00012-of-00191.safetensors",
751
+ "model.layers.6.self_attn.k_proj.weight": "model-00010-of-00191.safetensors",
752
+ "model.layers.6.self_attn.o_proj.weight": "model-00010-of-00191.safetensors",
753
+ "model.layers.6.self_attn.q_proj.weight": "model-00010-of-00191.safetensors",
754
+ "model.layers.6.self_attn.v_proj.weight": "model-00010-of-00191.safetensors",
755
+ "model.layers.60.input_layernorm.weight": "model-00093-of-00191.safetensors",
756
+ "model.layers.60.mlp.down_proj.weight": "model-00093-of-00191.safetensors",
757
+ "model.layers.60.mlp.gate_proj.weight": "model-00092-of-00191.safetensors",
758
+ "model.layers.60.mlp.up_proj.weight": "model-00092-of-00191.safetensors",
759
+ "model.layers.60.post_attention_layernorm.weight": "model-00093-of-00191.safetensors",
760
+ "model.layers.60.self_attn.k_proj.weight": "model-00091-of-00191.safetensors",
761
+ "model.layers.60.self_attn.o_proj.weight": "model-00091-of-00191.safetensors",
762
+ "model.layers.60.self_attn.q_proj.weight": "model-00091-of-00191.safetensors",
763
+ "model.layers.60.self_attn.v_proj.weight": "model-00091-of-00191.safetensors",
764
+ "model.layers.61.input_layernorm.weight": "model-00094-of-00191.safetensors",
765
+ "model.layers.61.mlp.down_proj.weight": "model-00094-of-00191.safetensors",
766
+ "model.layers.61.mlp.gate_proj.weight": "model-00093-of-00191.safetensors",
767
+ "model.layers.61.mlp.up_proj.weight": "model-00094-of-00191.safetensors",
768
+ "model.layers.61.post_attention_layernorm.weight": "model-00094-of-00191.safetensors",
769
+ "model.layers.61.self_attn.k_proj.weight": "model-00093-of-00191.safetensors",
770
+ "model.layers.61.self_attn.o_proj.weight": "model-00093-of-00191.safetensors",
771
+ "model.layers.61.self_attn.q_proj.weight": "model-00093-of-00191.safetensors",
772
+ "model.layers.61.self_attn.v_proj.weight": "model-00093-of-00191.safetensors",
773
+ "model.layers.62.input_layernorm.weight": "model-00096-of-00191.safetensors",
774
+ "model.layers.62.mlp.down_proj.weight": "model-00096-of-00191.safetensors",
775
+ "model.layers.62.mlp.gate_proj.weight": "model-00095-of-00191.safetensors",
776
+ "model.layers.62.mlp.up_proj.weight": "model-00095-of-00191.safetensors",
777
+ "model.layers.62.post_attention_layernorm.weight": "model-00096-of-00191.safetensors",
778
+ "model.layers.62.self_attn.k_proj.weight": "model-00094-of-00191.safetensors",
779
+ "model.layers.62.self_attn.o_proj.weight": "model-00094-of-00191.safetensors",
780
+ "model.layers.62.self_attn.q_proj.weight": "model-00094-of-00191.safetensors",
781
+ "model.layers.62.self_attn.v_proj.weight": "model-00094-of-00191.safetensors",
782
+ "model.layers.63.input_layernorm.weight": "model-00097-of-00191.safetensors",
783
+ "model.layers.63.mlp.down_proj.weight": "model-00097-of-00191.safetensors",
784
+ "model.layers.63.mlp.gate_proj.weight": "model-00096-of-00191.safetensors",
785
+ "model.layers.63.mlp.up_proj.weight": "model-00097-of-00191.safetensors",
786
+ "model.layers.63.post_attention_layernorm.weight": "model-00097-of-00191.safetensors",
787
+ "model.layers.63.self_attn.k_proj.weight": "model-00096-of-00191.safetensors",
788
+ "model.layers.63.self_attn.o_proj.weight": "model-00096-of-00191.safetensors",
789
+ "model.layers.63.self_attn.q_proj.weight": "model-00096-of-00191.safetensors",
790
+ "model.layers.63.self_attn.v_proj.weight": "model-00096-of-00191.safetensors",
791
+ "model.layers.64.input_layernorm.weight": "model-00099-of-00191.safetensors",
792
+ "model.layers.64.mlp.down_proj.weight": "model-00099-of-00191.safetensors",
793
+ "model.layers.64.mlp.gate_proj.weight": "model-00098-of-00191.safetensors",
794
+ "model.layers.64.mlp.up_proj.weight": "model-00098-of-00191.safetensors",
795
+ "model.layers.64.post_attention_layernorm.weight": "model-00099-of-00191.safetensors",
796
+ "model.layers.64.self_attn.k_proj.weight": "model-00097-of-00191.safetensors",
797
+ "model.layers.64.self_attn.o_proj.weight": "model-00097-of-00191.safetensors",
798
+ "model.layers.64.self_attn.q_proj.weight": "model-00097-of-00191.safetensors",
799
+ "model.layers.64.self_attn.v_proj.weight": "model-00097-of-00191.safetensors",
800
+ "model.layers.65.input_layernorm.weight": "model-00100-of-00191.safetensors",
801
+ "model.layers.65.mlp.down_proj.weight": "model-00100-of-00191.safetensors",
802
+ "model.layers.65.mlp.gate_proj.weight": "model-00099-of-00191.safetensors",
803
+ "model.layers.65.mlp.up_proj.weight": "model-00100-of-00191.safetensors",
804
+ "model.layers.65.post_attention_layernorm.weight": "model-00100-of-00191.safetensors",
805
+ "model.layers.65.self_attn.k_proj.weight": "model-00099-of-00191.safetensors",
806
+ "model.layers.65.self_attn.o_proj.weight": "model-00099-of-00191.safetensors",
807
+ "model.layers.65.self_attn.q_proj.weight": "model-00099-of-00191.safetensors",
808
+ "model.layers.65.self_attn.v_proj.weight": "model-00099-of-00191.safetensors",
809
+ "model.layers.66.input_layernorm.weight": "model-00102-of-00191.safetensors",
810
+ "model.layers.66.mlp.down_proj.weight": "model-00102-of-00191.safetensors",
811
+ "model.layers.66.mlp.gate_proj.weight": "model-00101-of-00191.safetensors",
812
+ "model.layers.66.mlp.up_proj.weight": "model-00101-of-00191.safetensors",
813
+ "model.layers.66.post_attention_layernorm.weight": "model-00102-of-00191.safetensors",
814
+ "model.layers.66.self_attn.k_proj.weight": "model-00100-of-00191.safetensors",
815
+ "model.layers.66.self_attn.o_proj.weight": "model-00100-of-00191.safetensors",
816
+ "model.layers.66.self_attn.q_proj.weight": "model-00100-of-00191.safetensors",
817
+ "model.layers.66.self_attn.v_proj.weight": "model-00100-of-00191.safetensors",
818
+ "model.layers.67.input_layernorm.weight": "model-00103-of-00191.safetensors",
819
+ "model.layers.67.mlp.down_proj.weight": "model-00103-of-00191.safetensors",
820
+ "model.layers.67.mlp.gate_proj.weight": "model-00102-of-00191.safetensors",
821
+ "model.layers.67.mlp.up_proj.weight": "model-00103-of-00191.safetensors",
822
+ "model.layers.67.post_attention_layernorm.weight": "model-00103-of-00191.safetensors",
823
+ "model.layers.67.self_attn.k_proj.weight": "model-00102-of-00191.safetensors",
824
+ "model.layers.67.self_attn.o_proj.weight": "model-00102-of-00191.safetensors",
825
+ "model.layers.67.self_attn.q_proj.weight": "model-00102-of-00191.safetensors",
826
+ "model.layers.67.self_attn.v_proj.weight": "model-00102-of-00191.safetensors",
827
+ "model.layers.68.input_layernorm.weight": "model-00105-of-00191.safetensors",
828
+ "model.layers.68.mlp.down_proj.weight": "model-00105-of-00191.safetensors",
829
+ "model.layers.68.mlp.gate_proj.weight": "model-00104-of-00191.safetensors",
830
+ "model.layers.68.mlp.up_proj.weight": "model-00104-of-00191.safetensors",
831
+ "model.layers.68.post_attention_layernorm.weight": "model-00105-of-00191.safetensors",
832
+ "model.layers.68.self_attn.k_proj.weight": "model-00103-of-00191.safetensors",
833
+ "model.layers.68.self_attn.o_proj.weight": "model-00103-of-00191.safetensors",
834
+ "model.layers.68.self_attn.q_proj.weight": "model-00103-of-00191.safetensors",
835
+ "model.layers.68.self_attn.v_proj.weight": "model-00103-of-00191.safetensors",
836
+ "model.layers.69.input_layernorm.weight": "model-00106-of-00191.safetensors",
837
+ "model.layers.69.mlp.down_proj.weight": "model-00106-of-00191.safetensors",
838
+ "model.layers.69.mlp.gate_proj.weight": "model-00105-of-00191.safetensors",
839
+ "model.layers.69.mlp.up_proj.weight": "model-00106-of-00191.safetensors",
840
+ "model.layers.69.post_attention_layernorm.weight": "model-00106-of-00191.safetensors",
841
+ "model.layers.69.self_attn.k_proj.weight": "model-00105-of-00191.safetensors",
842
+ "model.layers.69.self_attn.o_proj.weight": "model-00105-of-00191.safetensors",
843
+ "model.layers.69.self_attn.q_proj.weight": "model-00105-of-00191.safetensors",
844
+ "model.layers.69.self_attn.v_proj.weight": "model-00105-of-00191.safetensors",
845
+ "model.layers.7.input_layernorm.weight": "model-00013-of-00191.safetensors",
846
+ "model.layers.7.mlp.down_proj.weight": "model-00013-of-00191.safetensors",
847
+ "model.layers.7.mlp.gate_proj.weight": "model-00012-of-00191.safetensors",
848
+ "model.layers.7.mlp.up_proj.weight": "model-00013-of-00191.safetensors",
849
+ "model.layers.7.post_attention_layernorm.weight": "model-00013-of-00191.safetensors",
850
+ "model.layers.7.self_attn.k_proj.weight": "model-00012-of-00191.safetensors",
851
+ "model.layers.7.self_attn.o_proj.weight": "model-00012-of-00191.safetensors",
852
+ "model.layers.7.self_attn.q_proj.weight": "model-00012-of-00191.safetensors",
853
+ "model.layers.7.self_attn.v_proj.weight": "model-00012-of-00191.safetensors",
854
+ "model.layers.70.input_layernorm.weight": "model-00108-of-00191.safetensors",
855
+ "model.layers.70.mlp.down_proj.weight": "model-00108-of-00191.safetensors",
856
+ "model.layers.70.mlp.gate_proj.weight": "model-00107-of-00191.safetensors",
857
+ "model.layers.70.mlp.up_proj.weight": "model-00107-of-00191.safetensors",
858
+ "model.layers.70.post_attention_layernorm.weight": "model-00108-of-00191.safetensors",
859
+ "model.layers.70.self_attn.k_proj.weight": "model-00106-of-00191.safetensors",
860
+ "model.layers.70.self_attn.o_proj.weight": "model-00106-of-00191.safetensors",
861
+ "model.layers.70.self_attn.q_proj.weight": "model-00106-of-00191.safetensors",
862
+ "model.layers.70.self_attn.v_proj.weight": "model-00106-of-00191.safetensors",
863
+ "model.layers.71.input_layernorm.weight": "model-00109-of-00191.safetensors",
864
+ "model.layers.71.mlp.down_proj.weight": "model-00109-of-00191.safetensors",
865
+ "model.layers.71.mlp.gate_proj.weight": "model-00108-of-00191.safetensors",
866
+ "model.layers.71.mlp.up_proj.weight": "model-00109-of-00191.safetensors",
867
+ "model.layers.71.post_attention_layernorm.weight": "model-00109-of-00191.safetensors",
868
+ "model.layers.71.self_attn.k_proj.weight": "model-00108-of-00191.safetensors",
869
+ "model.layers.71.self_attn.o_proj.weight": "model-00108-of-00191.safetensors",
870
+ "model.layers.71.self_attn.q_proj.weight": "model-00108-of-00191.safetensors",
871
+ "model.layers.71.self_attn.v_proj.weight": "model-00108-of-00191.safetensors",
872
+ "model.layers.72.input_layernorm.weight": "model-00111-of-00191.safetensors",
873
+ "model.layers.72.mlp.down_proj.weight": "model-00111-of-00191.safetensors",
874
+ "model.layers.72.mlp.gate_proj.weight": "model-00110-of-00191.safetensors",
875
+ "model.layers.72.mlp.up_proj.weight": "model-00110-of-00191.safetensors",
876
+ "model.layers.72.post_attention_layernorm.weight": "model-00111-of-00191.safetensors",
877
+ "model.layers.72.self_attn.k_proj.weight": "model-00109-of-00191.safetensors",
878
+ "model.layers.72.self_attn.o_proj.weight": "model-00109-of-00191.safetensors",
879
+ "model.layers.72.self_attn.q_proj.weight": "model-00109-of-00191.safetensors",
880
+ "model.layers.72.self_attn.v_proj.weight": "model-00109-of-00191.safetensors",
881
+ "model.layers.73.input_layernorm.weight": "model-00112-of-00191.safetensors",
882
+ "model.layers.73.mlp.down_proj.weight": "model-00112-of-00191.safetensors",
883
+ "model.layers.73.mlp.gate_proj.weight": "model-00111-of-00191.safetensors",
884
+ "model.layers.73.mlp.up_proj.weight": "model-00112-of-00191.safetensors",
885
+ "model.layers.73.post_attention_layernorm.weight": "model-00112-of-00191.safetensors",
886
+ "model.layers.73.self_attn.k_proj.weight": "model-00111-of-00191.safetensors",
887
+ "model.layers.73.self_attn.o_proj.weight": "model-00111-of-00191.safetensors",
888
+ "model.layers.73.self_attn.q_proj.weight": "model-00111-of-00191.safetensors",
889
+ "model.layers.73.self_attn.v_proj.weight": "model-00111-of-00191.safetensors",
890
+ "model.layers.74.input_layernorm.weight": "model-00114-of-00191.safetensors",
891
+ "model.layers.74.mlp.down_proj.weight": "model-00114-of-00191.safetensors",
892
+ "model.layers.74.mlp.gate_proj.weight": "model-00113-of-00191.safetensors",
893
+ "model.layers.74.mlp.up_proj.weight": "model-00113-of-00191.safetensors",
894
+ "model.layers.74.post_attention_layernorm.weight": "model-00114-of-00191.safetensors",
895
+ "model.layers.74.self_attn.k_proj.weight": "model-00112-of-00191.safetensors",
896
+ "model.layers.74.self_attn.o_proj.weight": "model-00112-of-00191.safetensors",
897
+ "model.layers.74.self_attn.q_proj.weight": "model-00112-of-00191.safetensors",
898
+ "model.layers.74.self_attn.v_proj.weight": "model-00112-of-00191.safetensors",
899
+ "model.layers.75.input_layernorm.weight": "model-00115-of-00191.safetensors",
900
+ "model.layers.75.mlp.down_proj.weight": "model-00115-of-00191.safetensors",
901
+ "model.layers.75.mlp.gate_proj.weight": "model-00114-of-00191.safetensors",
902
+ "model.layers.75.mlp.up_proj.weight": "model-00115-of-00191.safetensors",
903
+ "model.layers.75.post_attention_layernorm.weight": "model-00115-of-00191.safetensors",
904
+ "model.layers.75.self_attn.k_proj.weight": "model-00114-of-00191.safetensors",
905
+ "model.layers.75.self_attn.o_proj.weight": "model-00114-of-00191.safetensors",
906
+ "model.layers.75.self_attn.q_proj.weight": "model-00114-of-00191.safetensors",
907
+ "model.layers.75.self_attn.v_proj.weight": "model-00114-of-00191.safetensors",
908
+ "model.layers.76.input_layernorm.weight": "model-00117-of-00191.safetensors",
909
+ "model.layers.76.mlp.down_proj.weight": "model-00117-of-00191.safetensors",
910
+ "model.layers.76.mlp.gate_proj.weight": "model-00116-of-00191.safetensors",
911
+ "model.layers.76.mlp.up_proj.weight": "model-00116-of-00191.safetensors",
912
+ "model.layers.76.post_attention_layernorm.weight": "model-00117-of-00191.safetensors",
913
+ "model.layers.76.self_attn.k_proj.weight": "model-00115-of-00191.safetensors",
914
+ "model.layers.76.self_attn.o_proj.weight": "model-00115-of-00191.safetensors",
915
+ "model.layers.76.self_attn.q_proj.weight": "model-00115-of-00191.safetensors",
916
+ "model.layers.76.self_attn.v_proj.weight": "model-00115-of-00191.safetensors",
917
+ "model.layers.77.input_layernorm.weight": "model-00118-of-00191.safetensors",
918
+ "model.layers.77.mlp.down_proj.weight": "model-00118-of-00191.safetensors",
919
+ "model.layers.77.mlp.gate_proj.weight": "model-00117-of-00191.safetensors",
920
+ "model.layers.77.mlp.up_proj.weight": "model-00118-of-00191.safetensors",
921
+ "model.layers.77.post_attention_layernorm.weight": "model-00118-of-00191.safetensors",
922
+ "model.layers.77.self_attn.k_proj.weight": "model-00117-of-00191.safetensors",
923
+ "model.layers.77.self_attn.o_proj.weight": "model-00117-of-00191.safetensors",
924
+ "model.layers.77.self_attn.q_proj.weight": "model-00117-of-00191.safetensors",
925
+ "model.layers.77.self_attn.v_proj.weight": "model-00117-of-00191.safetensors",
926
+ "model.layers.78.input_layernorm.weight": "model-00120-of-00191.safetensors",
927
+ "model.layers.78.mlp.down_proj.weight": "model-00120-of-00191.safetensors",
928
+ "model.layers.78.mlp.gate_proj.weight": "model-00119-of-00191.safetensors",
929
+ "model.layers.78.mlp.up_proj.weight": "model-00119-of-00191.safetensors",
930
+ "model.layers.78.post_attention_layernorm.weight": "model-00120-of-00191.safetensors",
931
+ "model.layers.78.self_attn.k_proj.weight": "model-00118-of-00191.safetensors",
932
+ "model.layers.78.self_attn.o_proj.weight": "model-00118-of-00191.safetensors",
933
+ "model.layers.78.self_attn.q_proj.weight": "model-00118-of-00191.safetensors",
934
+ "model.layers.78.self_attn.v_proj.weight": "model-00118-of-00191.safetensors",
935
+ "model.layers.79.input_layernorm.weight": "model-00121-of-00191.safetensors",
936
+ "model.layers.79.mlp.down_proj.weight": "model-00121-of-00191.safetensors",
937
+ "model.layers.79.mlp.gate_proj.weight": "model-00120-of-00191.safetensors",
938
+ "model.layers.79.mlp.up_proj.weight": "model-00121-of-00191.safetensors",
939
+ "model.layers.79.post_attention_layernorm.weight": "model-00121-of-00191.safetensors",
940
+ "model.layers.79.self_attn.k_proj.weight": "model-00120-of-00191.safetensors",
941
+ "model.layers.79.self_attn.o_proj.weight": "model-00120-of-00191.safetensors",
942
+ "model.layers.79.self_attn.q_proj.weight": "model-00120-of-00191.safetensors",
943
+ "model.layers.79.self_attn.v_proj.weight": "model-00120-of-00191.safetensors",
944
+ "model.layers.8.input_layernorm.weight": "model-00015-of-00191.safetensors",
945
+ "model.layers.8.mlp.down_proj.weight": "model-00015-of-00191.safetensors",
946
+ "model.layers.8.mlp.gate_proj.weight": "model-00014-of-00191.safetensors",
947
+ "model.layers.8.mlp.up_proj.weight": "model-00014-of-00191.safetensors",
948
+ "model.layers.8.post_attention_layernorm.weight": "model-00015-of-00191.safetensors",
949
+ "model.layers.8.self_attn.k_proj.weight": "model-00013-of-00191.safetensors",
950
+ "model.layers.8.self_attn.o_proj.weight": "model-00013-of-00191.safetensors",
951
+ "model.layers.8.self_attn.q_proj.weight": "model-00013-of-00191.safetensors",
952
+ "model.layers.8.self_attn.v_proj.weight": "model-00013-of-00191.safetensors",
953
+ "model.layers.80.input_layernorm.weight": "model-00123-of-00191.safetensors",
954
+ "model.layers.80.mlp.down_proj.weight": "model-00123-of-00191.safetensors",
955
+ "model.layers.80.mlp.gate_proj.weight": "model-00122-of-00191.safetensors",
956
+ "model.layers.80.mlp.up_proj.weight": "model-00122-of-00191.safetensors",
957
+ "model.layers.80.post_attention_layernorm.weight": "model-00123-of-00191.safetensors",
958
+ "model.layers.80.self_attn.k_proj.weight": "model-00121-of-00191.safetensors",
959
+ "model.layers.80.self_attn.o_proj.weight": "model-00121-of-00191.safetensors",
960
+ "model.layers.80.self_attn.q_proj.weight": "model-00121-of-00191.safetensors",
961
+ "model.layers.80.self_attn.v_proj.weight": "model-00121-of-00191.safetensors",
962
+ "model.layers.81.input_layernorm.weight": "model-00124-of-00191.safetensors",
963
+ "model.layers.81.mlp.down_proj.weight": "model-00124-of-00191.safetensors",
964
+ "model.layers.81.mlp.gate_proj.weight": "model-00123-of-00191.safetensors",
965
+ "model.layers.81.mlp.up_proj.weight": "model-00124-of-00191.safetensors",
966
+ "model.layers.81.post_attention_layernorm.weight": "model-00124-of-00191.safetensors",
967
+ "model.layers.81.self_attn.k_proj.weight": "model-00123-of-00191.safetensors",
968
+ "model.layers.81.self_attn.o_proj.weight": "model-00123-of-00191.safetensors",
969
+ "model.layers.81.self_attn.q_proj.weight": "model-00123-of-00191.safetensors",
970
+ "model.layers.81.self_attn.v_proj.weight": "model-00123-of-00191.safetensors",
971
+ "model.layers.82.input_layernorm.weight": "model-00126-of-00191.safetensors",
972
+ "model.layers.82.mlp.down_proj.weight": "model-00126-of-00191.safetensors",
973
+ "model.layers.82.mlp.gate_proj.weight": "model-00125-of-00191.safetensors",
974
+ "model.layers.82.mlp.up_proj.weight": "model-00125-of-00191.safetensors",
975
+ "model.layers.82.post_attention_layernorm.weight": "model-00126-of-00191.safetensors",
976
+ "model.layers.82.self_attn.k_proj.weight": "model-00124-of-00191.safetensors",
977
+ "model.layers.82.self_attn.o_proj.weight": "model-00124-of-00191.safetensors",
978
+ "model.layers.82.self_attn.q_proj.weight": "model-00124-of-00191.safetensors",
979
+ "model.layers.82.self_attn.v_proj.weight": "model-00124-of-00191.safetensors",
980
+ "model.layers.83.input_layernorm.weight": "model-00127-of-00191.safetensors",
981
+ "model.layers.83.mlp.down_proj.weight": "model-00127-of-00191.safetensors",
982
+ "model.layers.83.mlp.gate_proj.weight": "model-00126-of-00191.safetensors",
983
+ "model.layers.83.mlp.up_proj.weight": "model-00127-of-00191.safetensors",
984
+ "model.layers.83.post_attention_layernorm.weight": "model-00127-of-00191.safetensors",
985
+ "model.layers.83.self_attn.k_proj.weight": "model-00126-of-00191.safetensors",
986
+ "model.layers.83.self_attn.o_proj.weight": "model-00126-of-00191.safetensors",
987
+ "model.layers.83.self_attn.q_proj.weight": "model-00126-of-00191.safetensors",
988
+ "model.layers.83.self_attn.v_proj.weight": "model-00126-of-00191.safetensors",
989
+ "model.layers.84.input_layernorm.weight": "model-00129-of-00191.safetensors",
990
+ "model.layers.84.mlp.down_proj.weight": "model-00129-of-00191.safetensors",
991
+ "model.layers.84.mlp.gate_proj.weight": "model-00128-of-00191.safetensors",
992
+ "model.layers.84.mlp.up_proj.weight": "model-00128-of-00191.safetensors",
993
+ "model.layers.84.post_attention_layernorm.weight": "model-00129-of-00191.safetensors",
994
+ "model.layers.84.self_attn.k_proj.weight": "model-00127-of-00191.safetensors",
995
+ "model.layers.84.self_attn.o_proj.weight": "model-00127-of-00191.safetensors",
996
+ "model.layers.84.self_attn.q_proj.weight": "model-00127-of-00191.safetensors",
997
+ "model.layers.84.self_attn.v_proj.weight": "model-00127-of-00191.safetensors",
998
+ "model.layers.85.input_layernorm.weight": "model-00130-of-00191.safetensors",
999
+ "model.layers.85.mlp.down_proj.weight": "model-00130-of-00191.safetensors",
1000
+ "model.layers.85.mlp.gate_proj.weight": "model-00129-of-00191.safetensors",
1001
+ "model.layers.85.mlp.up_proj.weight": "model-00130-of-00191.safetensors",
1002
+ "model.layers.85.post_attention_layernorm.weight": "model-00130-of-00191.safetensors",
1003
+ "model.layers.85.self_attn.k_proj.weight": "model-00129-of-00191.safetensors",
1004
+ "model.layers.85.self_attn.o_proj.weight": "model-00129-of-00191.safetensors",
1005
+ "model.layers.85.self_attn.q_proj.weight": "model-00129-of-00191.safetensors",
1006
+ "model.layers.85.self_attn.v_proj.weight": "model-00129-of-00191.safetensors",
1007
+ "model.layers.86.input_layernorm.weight": "model-00132-of-00191.safetensors",
1008
+ "model.layers.86.mlp.down_proj.weight": "model-00132-of-00191.safetensors",
1009
+ "model.layers.86.mlp.gate_proj.weight": "model-00131-of-00191.safetensors",
1010
+ "model.layers.86.mlp.up_proj.weight": "model-00131-of-00191.safetensors",
1011
+ "model.layers.86.post_attention_layernorm.weight": "model-00132-of-00191.safetensors",
1012
+ "model.layers.86.self_attn.k_proj.weight": "model-00130-of-00191.safetensors",
1013
+ "model.layers.86.self_attn.o_proj.weight": "model-00130-of-00191.safetensors",
1014
+ "model.layers.86.self_attn.q_proj.weight": "model-00130-of-00191.safetensors",
1015
+ "model.layers.86.self_attn.v_proj.weight": "model-00130-of-00191.safetensors",
1016
+ "model.layers.87.input_layernorm.weight": "model-00133-of-00191.safetensors",
1017
+ "model.layers.87.mlp.down_proj.weight": "model-00133-of-00191.safetensors",
1018
+ "model.layers.87.mlp.gate_proj.weight": "model-00132-of-00191.safetensors",
1019
+ "model.layers.87.mlp.up_proj.weight": "model-00133-of-00191.safetensors",
1020
+ "model.layers.87.post_attention_layernorm.weight": "model-00133-of-00191.safetensors",
1021
+ "model.layers.87.self_attn.k_proj.weight": "model-00132-of-00191.safetensors",
1022
+ "model.layers.87.self_attn.o_proj.weight": "model-00132-of-00191.safetensors",
1023
+ "model.layers.87.self_attn.q_proj.weight": "model-00132-of-00191.safetensors",
1024
+ "model.layers.87.self_attn.v_proj.weight": "model-00132-of-00191.safetensors",
1025
+ "model.layers.88.input_layernorm.weight": "model-00135-of-00191.safetensors",
1026
+ "model.layers.88.mlp.down_proj.weight": "model-00135-of-00191.safetensors",
1027
+ "model.layers.88.mlp.gate_proj.weight": "model-00134-of-00191.safetensors",
1028
+ "model.layers.88.mlp.up_proj.weight": "model-00134-of-00191.safetensors",
1029
+ "model.layers.88.post_attention_layernorm.weight": "model-00135-of-00191.safetensors",
1030
+ "model.layers.88.self_attn.k_proj.weight": "model-00133-of-00191.safetensors",
1031
+ "model.layers.88.self_attn.o_proj.weight": "model-00133-of-00191.safetensors",
1032
+ "model.layers.88.self_attn.q_proj.weight": "model-00133-of-00191.safetensors",
1033
+ "model.layers.88.self_attn.v_proj.weight": "model-00133-of-00191.safetensors",
1034
+ "model.layers.89.input_layernorm.weight": "model-00136-of-00191.safetensors",
1035
+ "model.layers.89.mlp.down_proj.weight": "model-00136-of-00191.safetensors",
1036
+ "model.layers.89.mlp.gate_proj.weight": "model-00135-of-00191.safetensors",
1037
+ "model.layers.89.mlp.up_proj.weight": "model-00136-of-00191.safetensors",
1038
+ "model.layers.89.post_attention_layernorm.weight": "model-00136-of-00191.safetensors",
1039
+ "model.layers.89.self_attn.k_proj.weight": "model-00135-of-00191.safetensors",
1040
+ "model.layers.89.self_attn.o_proj.weight": "model-00135-of-00191.safetensors",
1041
+ "model.layers.89.self_attn.q_proj.weight": "model-00135-of-00191.safetensors",
1042
+ "model.layers.89.self_attn.v_proj.weight": "model-00135-of-00191.safetensors",
1043
+ "model.layers.9.input_layernorm.weight": "model-00016-of-00191.safetensors",
1044
+ "model.layers.9.mlp.down_proj.weight": "model-00016-of-00191.safetensors",
1045
+ "model.layers.9.mlp.gate_proj.weight": "model-00015-of-00191.safetensors",
1046
+ "model.layers.9.mlp.up_proj.weight": "model-00016-of-00191.safetensors",
1047
+ "model.layers.9.post_attention_layernorm.weight": "model-00016-of-00191.safetensors",
1048
+ "model.layers.9.self_attn.k_proj.weight": "model-00015-of-00191.safetensors",
1049
+ "model.layers.9.self_attn.o_proj.weight": "model-00015-of-00191.safetensors",
1050
+ "model.layers.9.self_attn.q_proj.weight": "model-00015-of-00191.safetensors",
1051
+ "model.layers.9.self_attn.v_proj.weight": "model-00015-of-00191.safetensors",
1052
+ "model.layers.90.input_layernorm.weight": "model-00138-of-00191.safetensors",
1053
+ "model.layers.90.mlp.down_proj.weight": "model-00138-of-00191.safetensors",
1054
+ "model.layers.90.mlp.gate_proj.weight": "model-00137-of-00191.safetensors",
1055
+ "model.layers.90.mlp.up_proj.weight": "model-00137-of-00191.safetensors",
1056
+ "model.layers.90.post_attention_layernorm.weight": "model-00138-of-00191.safetensors",
1057
+ "model.layers.90.self_attn.k_proj.weight": "model-00136-of-00191.safetensors",
1058
+ "model.layers.90.self_attn.o_proj.weight": "model-00136-of-00191.safetensors",
1059
+ "model.layers.90.self_attn.q_proj.weight": "model-00136-of-00191.safetensors",
1060
+ "model.layers.90.self_attn.v_proj.weight": "model-00136-of-00191.safetensors",
1061
+ "model.layers.91.input_layernorm.weight": "model-00139-of-00191.safetensors",
1062
+ "model.layers.91.mlp.down_proj.weight": "model-00139-of-00191.safetensors",
1063
+ "model.layers.91.mlp.gate_proj.weight": "model-00138-of-00191.safetensors",
1064
+ "model.layers.91.mlp.up_proj.weight": "model-00139-of-00191.safetensors",
1065
+ "model.layers.91.post_attention_layernorm.weight": "model-00139-of-00191.safetensors",
1066
+ "model.layers.91.self_attn.k_proj.weight": "model-00138-of-00191.safetensors",
1067
+ "model.layers.91.self_attn.o_proj.weight": "model-00138-of-00191.safetensors",
1068
+ "model.layers.91.self_attn.q_proj.weight": "model-00138-of-00191.safetensors",
1069
+ "model.layers.91.self_attn.v_proj.weight": "model-00138-of-00191.safetensors",
1070
+ "model.layers.92.input_layernorm.weight": "model-00141-of-00191.safetensors",
1071
+ "model.layers.92.mlp.down_proj.weight": "model-00141-of-00191.safetensors",
1072
+ "model.layers.92.mlp.gate_proj.weight": "model-00140-of-00191.safetensors",
1073
+ "model.layers.92.mlp.up_proj.weight": "model-00140-of-00191.safetensors",
1074
+ "model.layers.92.post_attention_layernorm.weight": "model-00141-of-00191.safetensors",
1075
+ "model.layers.92.self_attn.k_proj.weight": "model-00139-of-00191.safetensors",
1076
+ "model.layers.92.self_attn.o_proj.weight": "model-00139-of-00191.safetensors",
1077
+ "model.layers.92.self_attn.q_proj.weight": "model-00139-of-00191.safetensors",
1078
+ "model.layers.92.self_attn.v_proj.weight": "model-00139-of-00191.safetensors",
1079
+ "model.layers.93.input_layernorm.weight": "model-00142-of-00191.safetensors",
1080
+ "model.layers.93.mlp.down_proj.weight": "model-00142-of-00191.safetensors",
1081
+ "model.layers.93.mlp.gate_proj.weight": "model-00141-of-00191.safetensors",
1082
+ "model.layers.93.mlp.up_proj.weight": "model-00142-of-00191.safetensors",
1083
+ "model.layers.93.post_attention_layernorm.weight": "model-00142-of-00191.safetensors",
1084
+ "model.layers.93.self_attn.k_proj.weight": "model-00141-of-00191.safetensors",
1085
+ "model.layers.93.self_attn.o_proj.weight": "model-00141-of-00191.safetensors",
1086
+ "model.layers.93.self_attn.q_proj.weight": "model-00141-of-00191.safetensors",
1087
+ "model.layers.93.self_attn.v_proj.weight": "model-00141-of-00191.safetensors",
1088
+ "model.layers.94.input_layernorm.weight": "model-00144-of-00191.safetensors",
1089
+ "model.layers.94.mlp.down_proj.weight": "model-00144-of-00191.safetensors",
1090
+ "model.layers.94.mlp.gate_proj.weight": "model-00143-of-00191.safetensors",
1091
+ "model.layers.94.mlp.up_proj.weight": "model-00143-of-00191.safetensors",
1092
+ "model.layers.94.post_attention_layernorm.weight": "model-00144-of-00191.safetensors",
1093
+ "model.layers.94.self_attn.k_proj.weight": "model-00142-of-00191.safetensors",
1094
+ "model.layers.94.self_attn.o_proj.weight": "model-00142-of-00191.safetensors",
1095
+ "model.layers.94.self_attn.q_proj.weight": "model-00142-of-00191.safetensors",
1096
+ "model.layers.94.self_attn.v_proj.weight": "model-00142-of-00191.safetensors",
1097
+ "model.layers.95.input_layernorm.weight": "model-00145-of-00191.safetensors",
1098
+ "model.layers.95.mlp.down_proj.weight": "model-00145-of-00191.safetensors",
1099
+ "model.layers.95.mlp.gate_proj.weight": "model-00144-of-00191.safetensors",
1100
+ "model.layers.95.mlp.up_proj.weight": "model-00145-of-00191.safetensors",
1101
+ "model.layers.95.post_attention_layernorm.weight": "model-00145-of-00191.safetensors",
1102
+ "model.layers.95.self_attn.k_proj.weight": "model-00144-of-00191.safetensors",
1103
+ "model.layers.95.self_attn.o_proj.weight": "model-00144-of-00191.safetensors",
1104
+ "model.layers.95.self_attn.q_proj.weight": "model-00144-of-00191.safetensors",
1105
+ "model.layers.95.self_attn.v_proj.weight": "model-00144-of-00191.safetensors",
1106
+ "model.layers.96.input_layernorm.weight": "model-00147-of-00191.safetensors",
1107
+ "model.layers.96.mlp.down_proj.weight": "model-00147-of-00191.safetensors",
1108
+ "model.layers.96.mlp.gate_proj.weight": "model-00146-of-00191.safetensors",
1109
+ "model.layers.96.mlp.up_proj.weight": "model-00146-of-00191.safetensors",
1110
+ "model.layers.96.post_attention_layernorm.weight": "model-00147-of-00191.safetensors",
1111
+ "model.layers.96.self_attn.k_proj.weight": "model-00145-of-00191.safetensors",
1112
+ "model.layers.96.self_attn.o_proj.weight": "model-00145-of-00191.safetensors",
1113
+ "model.layers.96.self_attn.q_proj.weight": "model-00145-of-00191.safetensors",
1114
+ "model.layers.96.self_attn.v_proj.weight": "model-00145-of-00191.safetensors",
1115
+ "model.layers.97.input_layernorm.weight": "model-00148-of-00191.safetensors",
1116
+ "model.layers.97.mlp.down_proj.weight": "model-00148-of-00191.safetensors",
1117
+ "model.layers.97.mlp.gate_proj.weight": "model-00147-of-00191.safetensors",
1118
+ "model.layers.97.mlp.up_proj.weight": "model-00148-of-00191.safetensors",
1119
+ "model.layers.97.post_attention_layernorm.weight": "model-00148-of-00191.safetensors",
1120
+ "model.layers.97.self_attn.k_proj.weight": "model-00147-of-00191.safetensors",
1121
+ "model.layers.97.self_attn.o_proj.weight": "model-00147-of-00191.safetensors",
1122
+ "model.layers.97.self_attn.q_proj.weight": "model-00147-of-00191.safetensors",
1123
+ "model.layers.97.self_attn.v_proj.weight": "model-00147-of-00191.safetensors",
1124
+ "model.layers.98.input_layernorm.weight": "model-00150-of-00191.safetensors",
1125
+ "model.layers.98.mlp.down_proj.weight": "model-00150-of-00191.safetensors",
1126
+ "model.layers.98.mlp.gate_proj.weight": "model-00149-of-00191.safetensors",
1127
+ "model.layers.98.mlp.up_proj.weight": "model-00149-of-00191.safetensors",
1128
+ "model.layers.98.post_attention_layernorm.weight": "model-00150-of-00191.safetensors",
1129
+ "model.layers.98.self_attn.k_proj.weight": "model-00148-of-00191.safetensors",
1130
+ "model.layers.98.self_attn.o_proj.weight": "model-00148-of-00191.safetensors",
1131
+ "model.layers.98.self_attn.q_proj.weight": "model-00148-of-00191.safetensors",
1132
+ "model.layers.98.self_attn.v_proj.weight": "model-00148-of-00191.safetensors",
1133
+ "model.layers.99.input_layernorm.weight": "model-00151-of-00191.safetensors",
1134
+ "model.layers.99.mlp.down_proj.weight": "model-00151-of-00191.safetensors",
1135
+ "model.layers.99.mlp.gate_proj.weight": "model-00150-of-00191.safetensors",
1136
+ "model.layers.99.mlp.up_proj.weight": "model-00151-of-00191.safetensors",
1137
+ "model.layers.99.post_attention_layernorm.weight": "model-00151-of-00191.safetensors",
1138
+ "model.layers.99.self_attn.k_proj.weight": "model-00150-of-00191.safetensors",
1139
+ "model.layers.99.self_attn.o_proj.weight": "model-00150-of-00191.safetensors",
1140
+ "model.layers.99.self_attn.q_proj.weight": "model-00150-of-00191.safetensors",
1141
+ "model.layers.99.self_attn.v_proj.weight": "model-00150-of-00191.safetensors",
1142
+ "model.norm.weight": "model-00190-of-00191.safetensors"
1143
+ }
1144
+ }
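The weight_map above is what allows partial loading: a consumer can look up which shard holds a given tensor and read only that file instead of all 191. A minimal sketch in Python, assuming a hypothetical local checkout (repo_dir) that contains the index file alongside its model-*.safetensors shards; the tensor name is one taken from the map above:

import json
import os

from safetensors import safe_open

repo_dir = "."  # hypothetical local checkout containing the shards
with open(os.path.join(repo_dir, "model.safetensors.index.json")) as f:
    index = json.load(f)

# Resolve which shard holds one tensor, then load only that tensor.
name = "model.layers.99.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # "model-00150-of-00191.safetensors" per the map above
with safe_open(os.path.join(repo_dir, shard), framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape))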
output-00001-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3c4d6a7730ce9db4b666bc8fef1d15b0ef85fe10d7fc6adea6a163946771c0a
+ size 8509584250
output-00002-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bfd4130b5aa68cf18b289dde8fe973a4e375ed707a3ce9faf051e01f7b055890
+ size 8364890534
output-00003-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:164946c3efa5accfc2e93b7bf274aa3e5a3e7c41b45b78e8ac1df476010814aa
+ size 8328237512
output-00004-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aae0de03347f8e9d93550a02ae8aabcab56fe26183c3cd638c7b183ac8eea4c6
+ size 8535841512
output-00005-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52e0037e574071fb1c4c06e1dcf32d463c43b94bef84299f811f0e48243119e0
+ size 8329875738
output-00006-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:612fabe60dbf59d1e928cd19bdcaec3e119ddd424fff094c5997153e3e3c1455
+ size 8138536446
output-00007-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23ee72c99fb2acbcf466c2118c00adab595492b6f9ba09758379afe2e645ee32
+ size 8221275044
output-00008-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5533f8183be0fb9e900f940edab20e4da22f3e09cfa46443f62d7219c7515a48
+ size 8041276366
output-00009-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a37f7fb55b7d74d853806bb73898c12795cfd83223567379774477ead03b568c
+ size 8092252390
output-00010-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ae55b315fdcc4b75e4a77d303d3cf0020b737612065bacb42895cedfab046c8
+ size 8497453682
output-00011-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecc2a016a48b6973f32aa4f32189e4be6b29a2d6ad8409304a61eb939aef6925
+ size 8589893988
output-00012-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7535594275a6c361a8d5e36779b341c8219e44d187ceddf7ab29bdf5e41cebfe
+ size 8252525438
output-00013-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:967771cd1ad0e09f635eb66b2e4f5a93dd9ed2ce54798eacc7f5426db2f799ea
+ size 8182172952
output-00014-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef687ff47cc367a56f9539fa5d51fcd5ba412d87ea87d137856e25e318a05e45
+ size 8248710722
output-00015-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:977b01e26d766c79f6a646d060cf75b0685cfd1631f3671aaa87fdd301bfb70b
+ size 8462587530
output-00016-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6007cd1c244a5fddcce002a50dc4a2aad548567b2cb77b327466bfd1c6c2951e
+ size 8269679354
output-00017-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b0723c4a94f39c57c2f5fdd31d9213930855fa2558fe5e81e98fcd2fb0fa0620
+ size 8410705360
output-00018-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88efaa4acb997f90b04e17d97959a7ad582f4c4d61f5d664b22de2fbc9ddeeb0
+ size 8217066714
output-00019-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cac4fb3eabdf75ff64993e68178a1af6ad3d3ad520b7e981f558d16a3a225718
+ size 7859080472
output-00020-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07fb930c84a59eae68a1247b498636e736b7a2567e1749596662f7960752734e
+ size 8559562008
output-00021-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e61149d46ef5806e5d1620ebb3b970a49046959f41ceaf5d8bb6dc5849f58c8
+ size 8345729140
output-00022-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d50dbb2662e76f536777bdc5618c2cfb5b502a9162679cb34e9c4217e516a55
+ size 8459266760
output-00023-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0bc43860a0cd17ad5ca2a5ba41c5d9bf2d4232a46aba6892af3e6d8cf22caa9d
+ size 8393465418
output-00024-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee13bc9c3d4b013ca408d0458262d6d799ccab2c3a8ec428156497ffc4956172
+ size 7815080536
output-00025-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e55cc4d009c7c407776c7a0a141ad48908fa510ab53e51f585b2f0ea17fda6fe
+ size 8210315842
output-00026-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccdc5e7b3d95b8e7dce04e37d8c58dd1500500545a45de1bb54e0acd3ab6485a
+ size 8486310360
output-00027-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62bcb5a594d096d682c841086c88eb7c67077683ecfc4b4a0c1d9e7c17f9df75
+ size 7868175232
output-00028-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d86dcaca0a8b266db5fcab33331aeab692ff81fa349cc406a2df012f54d161c
+ size 7998362292
output-00029-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:049c703e5164b0c833cc91d81682a07af9f4ec1449659e54ca5aa43829390714
+ size 8457421266
output-00030-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:999bbdbd3684dd676410d93de20e3f6ff7ec270ba241fd96a2280c48458106bf
+ size 8216538232
output-00031-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1622db52d69b85296d84b62005929a601b061e81241a3ad3f30184b30ffc9c4
+ size 8311564664
output-00032-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd4daa6cd3e0c87feb3437b8bc5fdeaaeff1db3154c38ace61149bd25d641359
+ size 8100547968
output-00033-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fabca77c54043508bdf8bfd6563776221ce3d5d93da8b2433af2ee10bfae1979
+ size 8583595384
output-00034-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26b4b2bbd30ee61f9571fba0b46bcd65713baab51699f0ab06964210f3a6c0ec
+ size 7951509768
output-00035-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa6d637cf386874a6a876c4be12da1132c82b6f78faf8dcb3ca7178a96304953
+ size 8163518728
output-00036-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d24178c1aa9cb7750cead981e80a47056fd211b4fa164cd77c1620dc3f0e6a4e
+ size 7854849512
output-00037-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:702ddbdcd20c5f242dd7403a6be0b83202503a0e112403e117aba165794b7a76
+ size 7807648912
output-00038-of-00038.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dacc96d47a98f2e58d71b38ead95879b8df940f33dfa1a8aa1700b6e637a246b
+ size 4283647608
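Each output-*.safetensors entry above is a Git LFS pointer (version, oid, size), not the binary shard itself; the actual payload is fetched when LFS smudges the pointer (e.g. via git lfs pull). A minimal sketch of reading those three fields from a pointer file that has not yet been smudged; the path is illustrative:

# Parse a Git LFS pointer file into its version/oid/size fields.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("output-00038-of-00038.safetensors")  # hypothetical local pointer
print(ptr["oid"], int(ptr["size"]))  # e.g. sha256:dacc96d4... 4283647608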
patch.diff ADDED
@@ -0,0 +1,291 @@
+ diff --git a/src/transformers/models/llama/convert_llama_weights_to_hf.py b/src/transformers/models/llama/convert_llama_weights_to_hf.py
+ index a0fbe4680..8b0ce2b13 100644
+ --- a/src/transformers/models/llama/convert_llama_weights_to_hf.py
+ +++ b/src/transformers/models/llama/convert_llama_weights_to_hf.py
+ @@ -17,10 +17,10 @@ import json
+ import os
+ import shutil
+ import warnings
+ -
+ +from typing import List
+ import torch
+
+ -from transformers import LlamaConfig, LlamaForCausalLM, LlamaTokenizer, PreTrainedTokenizerFast
+ +from transformers import LlamaConfig, LlamaForCausalLM, LlamaTokenizer, PreTrainedTokenizerFast, GenerationConfig
+ from transformers.convert_slow_tokenizer import TikTokenConverter
+
+
+ @@ -85,8 +85,12 @@ NUM_SHARDS = {
+ "65B": 8,
+ "70B": 8,
+ "70Bf": 8,
+ + "405B": 8,
+ + "405B-MP16": 16,
+ }
+
+ +CONTEXT_LENGTH_FOR_VERSION = {"3.1": 131072, "3": 8192, "2": 4096, "1": 2048}
+ +
+
+ def compute_intermediate_size(n, ffn_dim_multiplier=1, multiple_of=256):
+ return multiple_of * ((int(ffn_dim_multiplier * int(8 * n / 3)) + multiple_of - 1) // multiple_of)
+ @@ -107,9 +111,10 @@ def write_model(
+ input_base_path,
+ model_size=None,
+ safe_serialization=True,
+ - llama_version=1,
+ + llama_version="1",
+ vocab_size=None,
+ num_shards=None,
+ + instruct=False,
+ ):
+ os.makedirs(model_path, exist_ok=True)
+ tmp_model_path = os.path.join(model_path, "tmp")
+ @@ -125,18 +130,11 @@ def write_model(
+ dims_per_head = dim // n_heads
+ base = params.get("rope_theta", 10000.0)
+ inv_freq = 1.0 / (base ** (torch.arange(0, dims_per_head, 2).float() / dims_per_head))
+ - if base > 10000.0 and llama_version != 3:
+ + if base > 10000.0 and float(llama_version) < 3:
+ max_position_embeddings = 16384
+ else:
+ - # Depending on the Llama version, the default max_position_embeddings has different values.
+ - if llama_version == 1:
+ - max_position_embeddings = 2048
+ - elif llama_version == 2:
+ - max_position_embeddings = 4096
+ - elif llama_version == 3:
+ - max_position_embeddings = 8192
+ -
+ - vocab_size = vocab_size if vocab_size is not None else 32000
+ + max_position_embeddings = CONTEXT_LENGTH_FOR_VERSION[llama_version]
+ +
+ if params.get("n_kv_heads", None) is not None:
+ num_key_value_heads = params["n_kv_heads"] # for GQA / MQA
+ num_key_value_heads_per_shard = num_key_value_heads // num_shards
+ @@ -144,8 +142,7 @@ def write_model(
+ else: # compatibility with other checkpoints
+ num_key_value_heads = n_heads
+ num_key_value_heads_per_shard = n_heads_per_shard
+ - key_value_dim = dims_per_head * num_key_value_heads
+ - print(num_shards, num_key_value_heads, num_key_value_heads_per_shard, key_value_dim)
+ + key_value_dim = dim
+
+ # permute for sliced rotary
+ def permute(w, n_heads, dim1=dim, dim2=dim):
+ @@ -159,11 +156,9 @@ def write_model(
+ loaded = torch.load(os.path.join(input_base_path, "consolidated.00.pth"), map_location="cpu")
+ else:
+ # Sharded
+ - loaded = [
+ - torch.load(os.path.join(input_base_path, file), map_location="cpu")
+ - for file in os.listdir(input_base_path)
+ - if file.endswith(".pth")
+ - ]
+ + checkpoint_list = sorted([file for file in os.listdir(input_base_path) if file.endswith(".pth")])
+ + print("Loading in order:", checkpoint_list)
+ + loaded = [torch.load(os.path.join(input_base_path, file), map_location="cpu") for file in checkpoint_list]
+ param_count = 0
+ index_dict = {"weight_map": {}}
+ for layer_i in range(n_layers):
+ @@ -263,7 +258,7 @@ def write_model(
+ "lm_head.weight": loaded["output.weight"],
+ }
+ else:
+ - concat_dim = 0 if llama_version == 3 else 1
+ + concat_dim = 0 if llama_version in ['3', '3.1'] else 1
+ state_dict = {
+ "model.norm.weight": loaded[0]["norm.weight"],
+ "model.embed_tokens.weight": torch.cat(
+ @@ -282,6 +277,18 @@ def write_model(
+ write_json(index_dict, os.path.join(tmp_model_path, "pytorch_model.bin.index.json"))
+ ffn_dim_multiplier = params["ffn_dim_multiplier"] if "ffn_dim_multiplier" in params else 1
+ multiple_of = params["multiple_of"] if "multiple_of" in params else 256
+ +
+ + if llama_version in ['3', '3.1']:
+ + bos_token_id = 128000
+ +
+ + if instruct:
+ + eos_token_id = [128001, 128008, 128009]
+ + else:
+ + eos_token_id = 128001
+ + else:
+ + bos_token_id = 1
+ + eos_token_id = 2
+ +
+ config = LlamaConfig(
+ hidden_size=dim,
+ intermediate_size=compute_intermediate_size(dim, ffn_dim_multiplier, multiple_of),
+ @@ -292,11 +299,21 @@ def write_model(
+ vocab_size=vocab_size,
+ rope_theta=base,
+ max_position_embeddings=max_position_embeddings,
+ - bos_token_id=128000 if llama_version == 3 else 1,
+ - eos_token_id=128001 if llama_version == 3 else 2,
+ + bos_token_id=bos_token_id,
+ + eos_token_id=eos_token_id,
+ )
+ config.save_pretrained(tmp_model_path)
+
+ + if instruct:
+ + generation_config = GenerationConfig(
+ + do_sample=True,
+ + temperature=0.6,
+ + top_p=0.9,
+ + bos_token_id=bos_token_id,
+ + eos_token_id=eos_token_id,
+ + )
+ + generation_config.save_pretrained(tmp_model_path)
+ +
+ # Make space so we can load the model properly now.
+ del state_dict
+ del loaded
+ @@ -313,7 +330,7 @@ def write_model(
+
+
+ class Llama3Converter(TikTokenConverter):
+ - def __init__(self, vocab_file, num_reserved_special_tokens=256, **kwargs):
+ + def __init__(self, vocab_file, special_tokens=None, instruct=False, model_max_length=None, **kwargs):
+ super().__init__(vocab_file, **kwargs)
+ tokenizer = self.converted()
+ chat_template = (
+ @@ -327,34 +344,27 @@ class Llama3Converter(TikTokenConverter):
+ "{% endfor %}"
+ "{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}"
+ )
+ - num_reserved_special_tokens = 256
+ - special_tokens = [
+ - "<|begin_of_text|>",
+ - "<|end_of_text|>",
+ - "<|reserved_special_token_0|>",
+ - "<|reserved_special_token_1|>",
+ - "<|reserved_special_token_2|>",
+ - "<|reserved_special_token_3|>",
+ - "<|start_header_id|>",
+ - "<|end_header_id|>",
+ - "<|reserved_special_token_4|>",
+ - "<|eot_id|>", # end of turn
+ - ] + [f"<|reserved_special_token_{i}|>" for i in range(5, num_reserved_special_tokens - 5)]
+ tokenizer.add_special_tokens(special_tokens)
+
+ self.tokenizer = PreTrainedTokenizerFast(
+ tokenizer_object=tokenizer,
+ bos_token="<|begin_of_text|>",
+ - eos_token="<|end_of_text|>",
+ - chat_template=chat_template,
+ + eos_token="<|end_of_text|>" if not instruct else "<|eot_id|>",
+ + chat_template=chat_template if instruct else None,
+ model_input_names=["input_ids", "attention_mask"],
+ + model_max_length=model_max_length,
+ )
+
+
+ -def write_tokenizer(tokenizer_path, input_tokenizer_path, llama_version=2):
+ +def write_tokenizer(tokenizer_path, input_tokenizer_path, llama_version="2", special_tokens=None, instruct=False):
+ tokenizer_class = LlamaTokenizer if LlamaTokenizerFast is None else LlamaTokenizerFast
+ - if llama_version == 3:
+ - tokenizer = Llama3Converter(input_tokenizer_path).tokenizer
+ + if llama_version in ["3", "3.1"]:
+ + tokenizer = Llama3Converter(
+ + input_tokenizer_path,
+ + special_tokens,
+ + instruct,
+ + model_max_length=CONTEXT_LENGTH_FOR_VERSION[llama_version]
+ + ).tokenizer
+ else:
+ tokenizer = tokenizer_class(input_tokenizer_path)
+ print(f"Saving a {tokenizer_class.__name__} to {tokenizer_path}.")
+ @@ -362,6 +372,37 @@ def write_tokenizer(tokenizer_path, input_tokenizer_path, llama_version=2):
+ return tokenizer
+
+
+ +DEFAULT_LLAMA_SPECIAL_TOKENS = {
+ + "3": [
+ + "<|begin_of_text|>",
+ + "<|end_of_text|>",
+ + "<|reserved_special_token_0|>",
+ + "<|reserved_special_token_1|>",
+ + "<|reserved_special_token_2|>",
+ + "<|reserved_special_token_3|>",
+ + "<|start_header_id|>",
+ + "<|end_header_id|>",
+ + "<|reserved_special_token_4|>",
+ + "<|eot_id|>", # end of turn
+ + ]
+ + + [f"<|reserved_special_token_{i}|>" for i in range(5, 256 - 5)],
+ + "3.1": [
+ + "<|begin_of_text|>",
+ + "<|end_of_text|>",
+ + "<|reserved_special_token_0|>",
+ + "<|reserved_special_token_1|>",
+ + "<|finetune_right_pad_id|>",
+ + "<|reserved_special_token_2|>",
+ + "<|start_header_id|>",
+ + "<|end_header_id|>",
+ + "<|eom_id|>", # end of message
+ + "<|eot_id|>", # end of turn
+ + "<|python_tag|>",
+ + ]
+ + + [f"<|reserved_special_token_{i}|>" for i in range(3, 256 - 8)],
+ +}
+ +
+ +
+ def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ @@ -383,9 +424,9 @@ def main():
+ # Different Llama versions used different default values for max_position_embeddings, hence the need to be able to specify which version is being used.
+ parser.add_argument(
+ "--llama_version",
+ - choices=[1, 2, 3],
+ - default=1,
+ - type=int,
+ + choices=["1", "2", "3", "3.1"],
+ + default="1",
+ + type=str,
+ help="Version of the Llama model to convert. Currently supports Llama1 and Llama2. Controls the context size",
+ )
+ parser.add_argument(
+ @@ -394,11 +435,34 @@ def main():
+ type=int,
+ help="The number of individual shards used for the model. Does not have to be the same as the number of consolidated_xx.pth",
+ )
+ + parser.add_argument(
+ + "--special_tokens",
+ + default=None,
+ + type=List[str],
+ + help="The list of special tokens that should be added to the model.",
+ + )
+ + parser.add_argument(
+ + "--instruct",
+ + default=False,
+ + type=bool,
+ + help="Whether the model is an instruct model or not. Will affect special tokens for llama 3.1.",
+ + )
+ args = parser.parse_args()
+ if args.model_size is None and args.num_shards is None:
+ raise ValueError("You have to set at least `num_shards` if you are not giving the `model_size`")
+ + if args.special_tokens is None:
+ + args.special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS[str(args.llama_version)]
+ +
+ spm_path = os.path.join(args.input_dir, "tokenizer.model")
+ - vocab_size = len(write_tokenizer(args.output_dir, spm_path, llama_version=args.llama_version))
+ + vocab_size = len(
+ + write_tokenizer(
+ + args.output_dir,
+ + spm_path,
+ + llama_version=args.llama_version,
+ + special_tokens=args.special_tokens,
+ + instruct=args.instruct
+ + )
+ + )
+ if args.model_size != "tokenizer_only":
+ write_model(
+ model_path=args.output_dir,
+ @@ -408,6 +472,7 @@ def main():
+ llama_version=args.llama_version,
+ vocab_size=vocab_size,
+ num_shards=args.num_shards,
+ + instruct=args.instruct
+ )
+
+
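
The patch above extends the upstream convert_llama_weights_to_hf.py with 405B/405B-MP16 shard counts, a string-valued --llama_version that accepts "3.1", per-version context lengths, a configurable --special_tokens list, and an --instruct flag that switches the eos token, chat template, and saved generation config. A hypothetical invocation of the patched script is sketched below; the paths are placeholders, not taken from this repo.

# Sketch: run the patched converter on a Llama 3.1 405B MP16 checkpoint.
# Both directory paths are assumptions; the flags are the ones the diff
# above adds or modifies.
import subprocess

subprocess.run(
    [
        "python",
        "src/transformers/models/llama/convert_llama_weights_to_hf.py",
        "--input_dir", "/checkpoints/Meta-Llama-3.1-405B-Instruct",  # placeholder
        "--output_dir", "/converted/llama-3.1-405b-instruct-hf",     # placeholder
        "--model_size", "405B-MP16",
        "--llama_version", "3.1",
        "--instruct", "True",
    ],
    check=True,
)

Note that argparse's type=bool treats any non-empty string as truthy, so --instruct True works here but --instruct False would also evaluate to True; omit the flag entirely to keep the default of False.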
special_tokens_map.json ADDED
@@ -0,0 +1,16 @@
+ {
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
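
This mapping pins the bos token to <|begin_of_text|> and the eos token to <|eot_id|>, the end-of-turn token, consistent with the instruct branch of the patch above. A quick check, assuming the transformers package and a local clone of this repo:

# Sketch: load the shipped tokenizer files from the current directory and
# confirm the special-token mapping defined in special_tokens_map.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # assumed local clone of this repo
print(tok.bos_token)  # <|begin_of_text|>
print(tok.eos_token)  # <|eot_id|>, the end-of-turn token
print(tok.convert_tokens_to_ids(tok.eos_token))  # 128009 per tokenizer_config.json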
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
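For local inspection of the unrendered file, a small sketch using the tokenizers library (an assumption; any JSON-aware tool would also work):

# Sketch: open tokenizer.json directly and spot-check the vocabulary.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")  # assumed local clone of this repo
print(tok.get_vocab_size())                  # expected 128256 for the Llama 3.1 vocabulary
print(tok.token_to_id("<|begin_of_text|>"))  # expected 128000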
 
tokenizer_config.json ADDED
@@ -0,0 +1,2062 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "bos_token": "<|begin_of_text|>",
2053
+ "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
2054
+ "clean_up_tokenization_spaces": true,
2055
+ "eos_token": "<|eot_id|>",
2056
+ "model_input_names": [
2057
+ "input_ids",
2058
+ "attention_mask"
2059
+ ],
2060
+ "model_max_length": 131072,
2061
+ "tokenizer_class": "PreTrainedTokenizerFast"
2062
+ }
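
The "chat_template" entry above wraps each message as <|start_header_id|>{role}<|end_header_id|>, a blank line, the trimmed content, and <|eot_id|>; it prepends the <|begin_of_text|> BOS token once, and it always appends the assistant header to cue generation (the template has no add_generation_prompt branch). A minimal sketch of rendering it with the transformers library, assuming this repository's files are used as the tokenizer source (the repo id below is a placeholder):

    from transformers import AutoTokenizer

    # Placeholder repo id; substitute the actual model repository path.
    tokenizer = AutoTokenizer.from_pretrained("<repo_id>")

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]

    # tokenize=False returns the rendered prompt string rather than token ids.
    # With this template the trailing assistant header is emitted regardless
    # of the add_generation_prompt argument, since the template ignores it.
    prompt = tokenizer.apply_chat_template(messages, tokenize=False)
    print(prompt)
    # <|begin_of_text|><|start_header_id|>system<|end_header_id|>
    #
    # You are a helpful assistant.<|eot_id|>...<|start_header_id|>assistant<|end_header_id|>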