Add files using upload-large-folder tool
- .gitattributes +1 -0
- LICENSE.md +51 -0
- README.md +242 -0
- SD3.5L_example_workflow.json +688 -0
- mmdit.png +0 -0
- model_index.json +40 -0
- scheduler/scheduler_config.json +6 -0
- sd3.5_large.safetensors +3 -0
- sd3.5_large_demo.png +3 -0
- text_encoder/config.json +24 -0
- text_encoder/model.fp16.safetensors +3 -0
- text_encoder/model.safetensors +3 -0
- text_encoder_2/config.json +24 -0
- text_encoder_2/model.fp16.safetensors +3 -0
- text_encoder_2/model.safetensors +3 -0
- text_encoder_3/config.json +31 -0
- text_encoder_3/model-00001-of-00002.safetensors +3 -0
- text_encoder_3/model-00002-of-00002.safetensors +3 -0
- text_encoder_3/model.fp16-00001-of-00002.safetensors +3 -0
- text_encoder_3/model.fp16-00002-of-00002.safetensors +3 -0
- text_encoder_3/model.safetensors.index.fp16.json +226 -0
- text_encoder_3/model.safetensors.index.json +226 -0
- text_encoders/README.md +11 -0
- text_encoders/clip_g.safetensors +3 -0
- text_encoders/clip_l.safetensors +3 -0
- text_encoders/t5xxl_fp16.safetensors +3 -0
- text_encoders/t5xxl_fp8_e4m3fn.safetensors +3 -0
- tokenizer/merges.txt +0 -0
- tokenizer/special_tokens_map.json +30 -0
- tokenizer/tokenizer_config.json +30 -0
- tokenizer/vocab.json +0 -0
- tokenizer_2/merges.txt +0 -0
- tokenizer_2/special_tokens_map.json +30 -0
- tokenizer_2/tokenizer_config.json +38 -0
- tokenizer_2/vocab.json +0 -0
- tokenizer_3/special_tokens_map.json +125 -0
- tokenizer_3/spiece.model +3 -0
- tokenizer_3/tokenizer.json +0 -0
- tokenizer_3/tokenizer_config.json +940 -0
- transformer/config.json +16 -0
- transformer/diffusion_pytorch_model-00001-of-00002.safetensors +3 -0
- transformer/diffusion_pytorch_model-00002-of-00002.safetensors +3 -0
- transformer/diffusion_pytorch_model.safetensors.index.json +0 -0
- vae/config.json +38 -0
- vae/diffusion_pytorch_model.safetensors +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+sd3.5_large_demo.png filter=lfs diff=lfs merge=lfs -text
LICENSE.md
ADDED
@@ -0,0 +1,51 @@
STABILITY AI COMMUNITY LICENSE AGREEMENT
Last Updated: July 5, 2024

I. INTRODUCTION

This Agreement applies to any individual person or entity ("You", "Your" or "Licensee") that uses or distributes any portion or element of the Stability AI Materials or Derivative Works thereof for any Research & Non-Commercial or Commercial purpose. Capitalized terms not otherwise defined herein are defined in Section V below.

This Agreement is intended to allow research, non-commercial, and limited commercial uses of the Models free of charge. In order to ensure that certain limited commercial uses of the Models continue to be allowed, this Agreement preserves free access to the Models for people or organizations generating annual revenue of less than US $1,000,000 (or local currency equivalent).

By clicking "I Accept" or by using or distributing any portion or element of the Stability Materials or Derivative Works, You agree that You have read, understood and are bound by the terms of this Agreement. If You are acting on behalf of a company, organization or other entity, then "You" includes you and that entity, and You agree that You: (i) are an authorized representative of such entity with the authority to bind such entity to this Agreement, and (ii) You agree to the terms of this Agreement on that entity's behalf.

II. RESEARCH & NON-COMMERCIAL USE LICENSE

Subject to the terms of this Agreement, Stability AI grants You a non-exclusive, worldwide, non-transferable, non-sublicensable, revocable and royalty-free limited license under Stability AI's intellectual property or other rights owned by Stability AI embodied in the Stability AI Materials to use, reproduce, distribute, and create Derivative Works of, and make modifications to, the Stability AI Materials for any Research or Non-Commercial Purpose. "Research Purpose" means academic or scientific advancement, and in each case, is not primarily intended for commercial advantage or monetary compensation to You or others. "Non-Commercial Purpose" means any purpose other than a Research Purpose that is not primarily intended for commercial advantage or monetary compensation to You or others, such as personal use (i.e., hobbyist) or evaluation and testing.

III. COMMERCIAL USE LICENSE

Subject to the terms of this Agreement (including the remainder of this Section III), Stability AI grants You a non-exclusive, worldwide, non-transferable, non-sublicensable, revocable and royalty-free limited license under Stability AI's intellectual property or other rights owned by Stability AI embodied in the Stability AI Materials to use, reproduce, distribute, and create Derivative Works of, and make modifications to, the Stability AI Materials for any Commercial Purpose. "Commercial Purpose" means any purpose other than a Research Purpose or Non-Commercial Purpose that is primarily intended for commercial advantage or monetary compensation to You or others, including but not limited to, (i) creating, modifying, or distributing Your product or service, including via a hosted service or application programming interface, and (ii) for Your business's or organization's internal operations.
If You are using or distributing the Stability AI Materials for a Commercial Purpose, You must register with Stability AI at (https://stability.ai/community-license). If at any time You or Your Affiliate(s), either individually or in aggregate, generate more than USD $1,000,000 in annual revenue (or the equivalent thereof in Your local currency), regardless of whether that revenue is generated directly or indirectly from the Stability AI Materials or Derivative Works, any licenses granted to You under this Agreement shall terminate as of such date. You must request a license from Stability AI at (https://stability.ai/enterprise), which Stability AI may grant to You in its sole discretion. If you receive Stability AI Materials, or any Derivative Works thereof, from a Licensee as part of an integrated end user product, then Section III of this Agreement will not apply to you.

IV. GENERAL TERMS

Your Research, Non-Commercial, and Commercial License(s) under this Agreement are subject to the following terms.
a. Distribution & Attribution. If You distribute or make available the Stability AI Materials or a Derivative Work to a third party, or a product or service that uses any portion of them, You shall: (i) provide a copy of this Agreement to that third party, (ii) retain the following attribution notice within a "Notice" text file distributed as a part of such copies: "This Stability AI Model is licensed under the Stability AI Community License, Copyright © Stability AI Ltd. All Rights Reserved", and (iii) prominently display "Powered by Stability AI" on a related website, user interface, blogpost, about page, or product documentation. If You create a Derivative Work, You may add your own attribution notice(s) to the "Notice" text file included with that Derivative Work, provided that You clearly indicate which attributions apply to the Stability AI Materials and state in the "Notice" text file that You changed the Stability AI Materials and how it was modified.
b. Use Restrictions. Your use of the Stability AI Materials and Derivative Works, including any output or results of the Stability AI Materials or Derivative Works, must comply with applicable laws and regulations (including Trade Control Laws and equivalent regulations) and adhere to the Documentation and Stability AI's AUP, which is hereby incorporated by reference. Furthermore, You will not use the Stability AI Materials or Derivative Works, or any output or results of the Stability AI Materials or Derivative Works, to create or improve any foundational generative AI model (excluding the Models or Derivative Works).
c. Intellectual Property.
(i) Trademark License. No trademark licenses are granted under this Agreement, and in connection with the Stability AI Materials or Derivative Works, You may not use any name or mark owned by or associated with Stability AI or any of its Affiliates, except as required under Section IV(a) herein.
(ii) Ownership of Derivative Works. As between You and Stability AI, You are the owner of Derivative Works You create, subject to Stability AI's ownership of the Stability AI Materials and any Derivative Works made by or for Stability AI.
(iii) Ownership of Outputs. As between You and Stability AI, You own any outputs generated from the Models or Derivative Works to the extent permitted by applicable law.
(iv) Disputes. If You or Your Affiliate(s) institute litigation or other proceedings against Stability AI (including a cross-claim or counterclaim in a lawsuit) alleging that the Stability AI Materials, Derivative Works or associated outputs or results, or any portion of any of the foregoing, constitutes infringement of intellectual property or other rights owned or licensable by You, then any licenses granted to You under this Agreement shall terminate as of the date such litigation or claim is filed or instituted. You will indemnify and hold harmless Stability AI from and against any claim by any third party arising out of or related to Your use or distribution of the Stability AI Materials or Derivative Works in violation of this Agreement.
(v) Feedback. From time to time, You may provide Stability AI with verbal and/or written suggestions, comments or other feedback related to Stability AI's existing or prospective technology, products or services (collectively, "Feedback"). You are not obligated to provide Stability AI with Feedback, but to the extent that You do, You hereby grant Stability AI a perpetual, irrevocable, royalty-free, fully-paid, sub-licensable, transferable, non-exclusive, worldwide right and license to exploit the Feedback in any manner without restriction. Your Feedback is provided "AS IS" and You make no warranties whatsoever about any Feedback.
d. Disclaimer Of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE STABILITY AI MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OR LAWFULNESS OF USING OR REDISTRIBUTING THE STABILITY AI MATERIALS, DERIVATIVE WORKS OR ANY OUTPUT OR RESULTS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE STABILITY AI MATERIALS, DERIVATIVE WORKS AND ANY OUTPUT AND RESULTS.
e. Limitation Of Liability. IN NO EVENT WILL STABILITY AI OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF STABILITY AI OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF ANY OF THE FOREGOING.
f. Term And Termination. The term of this Agreement will commence upon Your acceptance of this Agreement or access to the Stability AI Materials and will continue in full force and effect until terminated in accordance with the terms and conditions herein. Stability AI may terminate this Agreement if You are in breach of any term or condition of this Agreement. Upon termination of this Agreement, You shall delete and cease use of any Stability AI Materials or Derivative Works. Section IV(d), (e), and (g) shall survive the termination of this Agreement.
g. Governing Law. This Agreement will be governed by and construed in accordance with the laws of the United States and the State of California without regard to choice of law principles, and the UN Convention on Contracts for International Sale of Goods does not apply to this Agreement.

V. DEFINITIONS

"Affiliate(s)" means any entity that directly or indirectly controls, is controlled by, or is under common control with the subject entity; for purposes of this definition, "control" means direct or indirect ownership or control of more than 50% of the voting interests of the subject entity.
"Agreement" means this Stability AI Community License Agreement.
"AUP" means the Stability AI Acceptable Use Policy available at https://stability.ai/use-policy, as may be updated from time to time.
"Derivative Work(s)" means (a) any derivative work of the Stability AI Materials as recognized by U.S. copyright laws and (b) any modifications to a Model, and any other model created which is based on or derived from the Model or the Model's output, including "fine tune" and "low-rank adaptation" models derived from a Model or a Model's output, but do not include the output of any Model.
"Documentation" means any specifications, manuals, documentation, and other written information provided by Stability AI related to the Software or Models.
"Model(s)" means, collectively, Stability AI's proprietary models and algorithms, including machine-learning models, trained model weights and other elements of the foregoing listed on Stability's Core Models Webpage available at https://stability.ai/core-models, as may be updated from time to time.
"Stability AI" or "we" means Stability AI Ltd. and its Affiliates.
"Software" means Stability AI's proprietary software made available under this Agreement now or in the future.
"Stability AI Materials" means, collectively, Stability's proprietary Models, Software and Documentation (and any portion or combination thereof) made available under this Agreement.
"Trade Control Laws" means any applicable U.S. and non-U.S. export control and trade sanctions laws and regulations.
README.md
ADDED
@@ -0,0 +1,242 @@
---
license: other
license_name: stabilityai-ai-community
license_link: LICENSE.md
tags:
- text-to-image
- stable-diffusion
- diffusers
inference: true
extra_gated_prompt: >-
  By clicking "Agree", you agree to the [License Agreement](https://huggingface.co/stabilityai/stable-diffusion-3.5-large/blob/main/LICENSE.md)
  and acknowledge Stability AI's [Privacy Policy](https://stability.ai/privacy-policy).
extra_gated_fields:
  Name: text
  Email: text
  Country: country
  Organization or Affiliation: text
  Receive email updates and promotions on Stability AI products, services, and research?:
    type: select
    options:
    - 'Yes'
    - 'No'
  What do you intend to use the model for?:
    type: select
    options:
    - Research
    - Personal use
    - Creative Professional
    - Startup
    - Enterprise
  I agree to the License Agreement and acknowledge Stability AI's Privacy Policy: checkbox

language:
- en
pipeline_tag: text-to-image
---

# Stable Diffusion 3.5 Large

![3.5 Large Demo Image](sd3.5_large_demo.png)

## Model

![MMDiT](mmdit.png)

[Stable Diffusion 3.5 Large](https://stability.ai/news/introducing-stable-diffusion-3-5) is a Multimodal Diffusion Transformer (MMDiT) text-to-image model that features improved performance in image quality, typography, complex prompt understanding, and resource-efficiency.

Please note: This model is released under the [Stability Community License](https://stability.ai/community-license-agreement). Visit [Stability AI](https://stability.ai/license) to learn more, or [contact us](https://stability.ai/enterprise) for commercial licensing details.

### Model Description

- **Developed by:** Stability AI
- **Model type:** MMDiT text-to-image generative model
- **Model Description:** This model generates images based on text prompts. It is a [Multimodal Diffusion Transformer](https://arxiv.org/abs/2403.03206) that uses three fixed, pretrained text encoders, with QK-normalization to improve training stability.

### License

- **Community License:** Free for research, non-commercial, and commercial use by organizations or individuals with less than $1M in total annual revenue. More details can be found in the [Community License Agreement](https://stability.ai/community-license-agreement). Read more at https://stability.ai/license.
- **For individuals and organizations with annual revenue above $1M:** please [contact us](https://stability.ai/enterprise) to get an Enterprise License.

### Model Sources

For local or self-hosted use, we recommend [ComfyUI](https://github.com/comfyanonymous/ComfyUI) for node-based UI inference, or [diffusers](https://github.com/huggingface/diffusers) or [GitHub](https://github.com/Stability-AI/sd3.5) for programmatic use.

- **ComfyUI:** [GitHub](https://github.com/comfyanonymous/ComfyUI), [Example Workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/)
- **Hugging Face Space:** [Space](https://huggingface.co/spaces/stabilityai/stable-diffusion-3.5-large)
- **Diffusers:** [See below](#using-with-diffusers).
- **GitHub:** [GitHub](https://github.com/Stability-AI/sd3.5).
- **API Endpoints:**
  - [Stability AI API](https://platform.stability.ai/docs/api-reference#tag/Generate/paths/~1v2beta~1stable-image~1generate~1sd3/post)
  - [Replicate](https://replicate.com/stability-ai/stable-diffusion-3.5-large)
  - [Deepinfra](https://deepinfra.com/stabilityai/sd3.5)

### Implementation Details

- **QK Normalization:** Implements the QK normalization technique to improve training stability.
- **Text Encoders:**
  - CLIPs: [OpenCLIP-ViT/G](https://github.com/mlfoundations/open_clip), [CLIP-ViT/L](https://github.com/openai/CLIP/tree/main), context length 77 tokens
  - T5: [T5-xxl](https://huggingface.co/google/t5-v1_1-xxl), context length 77/256 tokens at different stages of training (see the sketch after this section)
- **Training Data and Strategy:** This model was trained on a wide variety of data, including synthetic data and filtered publicly available data.

For more technical details of the original MMDiT architecture, please refer to the [Research paper](https://stability.ai/news/stable-diffusion-3-research-paper).
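
The T5-XXL encoder accounts for most of the text-encoding memory footprint. As a minimal, hedged sketch (relying on the diffusers SD3 pipeline option of passing `text_encoder_3=None`, not an official Stability recipe), you can skip loading T5 entirely and encode prompts with the two CLIP encoders only, trading some long-prompt fidelity for memory:

```py
import torch
from diffusers import StableDiffusion3Pipeline

# Sketch: load the pipeline without the T5-XXL encoder to cut memory.
# Prompts then go through the two CLIP encoders only (77-token context).
pipe = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3.5-large",
    text_encoder_3=None,  # drop T5-XXL
    tokenizer_3=None,
    torch_dtype=torch.bfloat16,
).to("cuda")

image = pipe(
    "A capybara holding a sign that reads Hello World",
    num_inference_steps=28,
    guidance_scale=3.5,
).images[0]
```
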
### Model Performance

See [blog](https://stability.ai/news/introducing-stable-diffusion-3-5) for our study about comparative performance in prompt adherence and aesthetic quality.

## File Structure

Click here to access the [Files and versions tab](https://huggingface.co/stabilityai/stable-diffusion-3.5-large/tree/main)

```
│
├── text_encoders/
│   ├── README.md
│   ├── clip_g.safetensors
│   ├── clip_l.safetensors
│   ├── t5xxl_fp16.safetensors
│   └── t5xxl_fp8_e4m3fn.safetensors
│
├── README.md
├── LICENSE
├── sd3.5_large.safetensors
├── SD3.5L_example_workflow.json
└── sd3.5_large_demo.png

**File structure below is for diffusers integration**
├── scheduler/
├── text_encoder/
├── text_encoder_2/
├── text_encoder_3/
├── tokenizer/
├── tokenizer_2/
├── tokenizer_3/
├── transformer/
├── vae/
└── model_index.json
```
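
If you only want the single-file checkpoint (e.g., for ComfyUI) rather than the full diffusers layout, a minimal sketch using `huggingface_hub` (the repo is gated, so authenticate first, e.g. with `huggingface-cli login`):

```py
from huggingface_hub import hf_hub_download

# Download just the single-file checkpoint and the example ComfyUI workflow.
ckpt_path = hf_hub_download(
    repo_id="stabilityai/stable-diffusion-3.5-large",
    filename="sd3.5_large.safetensors",
)
workflow_path = hf_hub_download(
    repo_id="stabilityai/stable-diffusion-3.5-large",
    filename="SD3.5L_example_workflow.json",
)
print(ckpt_path, workflow_path)
```
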
## Using with Diffusers

Upgrade to the latest version of the [🧨 diffusers library](https://github.com/huggingface/diffusers)

```
pip install -U diffusers
```

and then you can run

```py
import torch
from diffusers import StableDiffusion3Pipeline

pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3.5-large", torch_dtype=torch.bfloat16)
pipe = pipe.to("cuda")

image = pipe(
    "A capybara holding a sign that reads Hello World",
    num_inference_steps=28,
    guidance_scale=3.5,
).images[0]
image.save("capybara.png")
```

### Quantizing the model with diffusers

Reduce your VRAM usage and have the model fit on 🤏 VRAM GPUs

```
pip install bitsandbytes
```

```py
from diffusers import BitsAndBytesConfig, SD3Transformer2DModel
from diffusers import StableDiffusion3Pipeline
import torch

model_id = "stabilityai/stable-diffusion-3.5-large"

nf4_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16
)
model_nf4 = SD3Transformer2DModel.from_pretrained(
    model_id,
    subfolder="transformer",
    quantization_config=nf4_config,
    torch_dtype=torch.bfloat16
)

pipeline = StableDiffusion3Pipeline.from_pretrained(
    model_id,
    transformer=model_nf4,
    torch_dtype=torch.bfloat16
)
pipeline.enable_model_cpu_offload()

prompt = "A whimsical and creative image depicting a hybrid creature that is a mix of a waffle and a hippopotamus, basking in a river of melted butter amidst a breakfast-themed landscape. It features the distinctive, bulky body shape of a hippo. However, instead of the usual grey skin, the creature's body resembles a golden-brown, crispy waffle fresh off the griddle. The skin is textured with the familiar grid pattern of a waffle, each square filled with a glistening sheen of syrup. The environment combines the natural habitat of a hippo with elements of a breakfast table setting, a river of warm, melted butter, with oversized utensils or plates peeking out from the lush, pancake-like foliage in the background, a towering pepper mill standing in for a tree. As the sun rises in this fantastical world, it casts a warm, buttery glow over the scene. The creature, content in its butter river, lets out a yawn. Nearby, a flock of birds take flight"

image = pipeline(
    prompt=prompt,
    num_inference_steps=28,
    guidance_scale=4.5,
    max_sequence_length=512,
).images[0]
image.save("whimsical.png")
```

### Fine-tuning

Please see the fine-tuning guide [here](https://stabilityai.notion.site/Stable-Diffusion-3-5-Large-Fine-tuning-Tutorial-11a61cdcd1968027a15bdbd7c40be8c6).

## Uses

### Intended Uses

Intended uses include the following:
* Generation of artworks and use in design and other artistic processes.
* Applications in educational or creative tools.
* Research on generative models, including understanding the limitations of generative models.

All uses of the model must be in accordance with our [Acceptable Use Policy](https://stability.ai/use-policy).

### Out-of-Scope Uses

The model was not trained to produce factual or true representations of people or events. As such, using the model to generate such content is out of scope for this model.

## Safety

As part of our safety-by-design and responsible AI deployment approach, we take deliberate measures to ensure integrity starts at the early stages of development. We implement safety measures throughout the development of our models. We have implemented safety mitigations that are intended to reduce the risk of certain harms; however, we recommend that developers conduct their own testing and apply additional mitigations based on their specific use cases.
For more about our approach to Safety, please visit our [Safety page](https://stability.ai/safety).

### Integrity Evaluation

Our integrity evaluation methods include structured evaluations and red-teaming testing for certain harms. Testing was conducted primarily in English and may not cover all possible harms.

### Risks Identified and Mitigations

* Harmful content: We have used filtered data sets when training our models and implemented safeguards that attempt to strike the right balance between usefulness and preventing harm. However, this does not guarantee that all possible harmful content has been removed. All developers and deployers should exercise caution and implement content safety guardrails based on their specific product policies and application use cases.
* Misuse: Technical limitations and developer and end-user education can help mitigate against malicious applications of models. All users are required to adhere to our [Acceptable Use Policy](https://stability.ai/use-policy), including when applying fine-tuning and prompt engineering mechanisms. Please reference the Stability AI Acceptable Use Policy for information on violative uses of our products.
* Privacy violations: Developers and deployers are encouraged to adhere to privacy regulations with techniques that respect data privacy.

### Contact

Please report any issues with the model or contact us:

* Safety issues: safety@stability.ai
* Security issues: security@stability.ai
* Privacy issues: privacy@stability.ai
* License and general: https://stability.ai/license
* Enterprise license: https://stability.ai/enterprise
SD3.5L_example_workflow.json
ADDED
@@ -0,0 +1,688 @@
{
  "last_node_id": 300,
  "last_link_id": 605,
  "nodes": [
    {
      "id": 70, "type": "ConditioningSetTimestepRange", "pos": [126, 252],
      "size": {"0": 317.4000244140625, "1": 82}, "flags": {}, "order": 7, "mode": 0,
      "inputs": [{"name": "conditioning", "type": "CONDITIONING", "link": 93, "slot_index": 0}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [92], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "ConditioningSetTimestepRange"},
      "widgets_values": [0, 0.1]
    },
    {
      "id": 68, "type": "ConditioningSetTimestepRange", "pos": [126, 126],
      "size": {"0": 317.4000244140625, "1": 82}, "flags": {}, "order": 9, "mode": 0,
      "inputs": [{"name": "conditioning", "type": "CONDITIONING", "link": 90}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [91], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "ConditioningSetTimestepRange"},
      "widgets_values": [0.1, 1]
    },
    {
      "id": 67, "type": "ConditioningZeroOut", "pos": [-126, 126],
      "size": {"0": 211.60000610351562, "1": 26}, "flags": {}, "order": 8, "mode": 0,
      "inputs": [{"name": "conditioning", "type": "CONDITIONING", "link": 597}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [90], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "ConditioningZeroOut"}
    },
    {
      "id": 71, "type": "CLIPTextEncode", "pos": [-1010, 252],
      "size": [351.8130934034689, 195.5754530459866], "flags": {}, "order": 6, "mode": 0,
      "inputs": [{"name": "clip", "type": "CLIP", "link": 94}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [93, 597], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [""],
      "color": "#322", "bgcolor": "#533"
    },
    {
      "id": 6, "type": "CLIPTextEncode", "pos": [-1008, 2],
      "size": [342.83352734520565, 177.20867231021555], "flags": {}, "order": 5, "mode": 0,
      "inputs": [{"name": "clip", "type": "CLIP", "link": 5}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [569], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "CLIPTextEncode"},
      "widgets_values": ["beautiful scenery nature glass bottle landscape, purple galaxy bottle,"],
      "color": "#232", "bgcolor": "#353"
    },
    {
      "id": 294, "type": "KSampler", "pos": [882, -504],
      "size": [378, 504], "flags": {}, "order": 11, "mode": 0,
      "inputs": [
        {"name": "model", "type": "MODEL", "link": 568},
        {"name": "positive", "type": "CONDITIONING", "link": 569},
        {"name": "negative", "type": "CONDITIONING", "link": 604},
        {"name": "latent_image", "type": "LATENT", "link": 598}
      ],
      "outputs": [{"name": "LATENT", "type": "LATENT", "links": [572], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "KSampler"},
      "widgets_values": [66155038679131, "randomize", 40, 4.5, "dpmpp_2m", "sgm_uniform", 1]
    },
    {
      "id": 13, "type": "ModelSamplingSD3", "pos": [126, -504],
      "size": {"0": 315, "1": 58}, "flags": {"collapsed": false}, "order": 4, "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 445}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [568], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "ModelSamplingSD3"},
      "widgets_values": [3]
    },
    {
      "id": 4, "type": "CheckpointLoaderSimple", "pos": [-2016, -504],
      "size": {"0": 632.6060180664062, "1": 98}, "flags": {}, "order": 0, "mode": 0,
      "outputs": [
        {"name": "MODEL", "type": "MODEL", "links": [445], "shape": 3, "slot_index": 0},
        {"name": "CLIP", "type": "CLIP", "links": null, "shape": 3},
        {"name": "VAE", "type": "VAE", "links": [605], "shape": 3, "slot_index": 2}
      ],
      "properties": {"Node name for S&R": "CheckpointLoaderSimple"},
      "widgets_values": ["sd3.5_large.safetensors"]
    },
    {
      "id": 69, "type": "ConditioningCombine", "pos": [504, 126],
      "size": {"0": 228.39999389648438, "1": 46}, "flags": {}, "order": 10, "mode": 0,
      "inputs": [
        {"name": "conditioning_1", "type": "CONDITIONING", "link": 91},
        {"name": "conditioning_2", "type": "CONDITIONING", "link": 92}
      ],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [604], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "ConditioningCombine"}
    },
    {
      "id": 8, "type": "VAEDecode", "pos": [1386, -504],
      "size": {"0": 210, "1": 46}, "flags": {}, "order": 12, "mode": 0,
      "inputs": [
        {"name": "samples", "type": "LATENT", "link": 572},
        {"name": "vae", "type": "VAE", "link": 605}
      ],
      "outputs": [{"name": "IMAGE", "type": "IMAGE", "links": [274], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "VAEDecode"}
    },
    {
      "id": 50, "type": "PreviewImage", "pos": [1764, -504],
      "size": {"0": 616.689697265625, "1": 613.84130859375}, "flags": {}, "order": 13, "mode": 0,
      "inputs": [{"name": "images", "type": "IMAGE", "link": 274}],
      "properties": {"Node name for S&R": "PreviewImage"}
    },
    {
      "id": 135, "type": "EmptySD3LatentImage", "pos": [-1008, -252],
      "size": [315, 106], "flags": {}, "order": 1, "mode": 0,
      "inputs": [],
      "outputs": [{"name": "LATENT", "type": "LATENT", "links": [598], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "EmptySD3LatentImage"},
      "widgets_values": [1024, 1024, 1]
    },
    {
      "id": 105, "type": "Note", "pos": [-1260, -252],
      "size": [210, 110.1894832228611], "flags": {}, "order": 2, "mode": 0,
      "properties": {"text": ""},
      "widgets_values": ["Make sure the resolution is multiple of 64 pixels and adds up to around 1 megapixel. "],
      "color": "#432", "bgcolor": "#653"
    },
    {
      "id": 11, "type": "TripleCLIPLoader", "pos": [-2016, -252],
      "size": {"0": 315, "1": 106}, "flags": {}, "order": 3, "mode": 0,
      "outputs": [{"name": "CLIP", "type": "CLIP", "links": [5, 94], "shape": 3, "slot_index": 0}],
      "properties": {"Node name for S&R": "TripleCLIPLoader"},
      "widgets_values": ["clip_g_sdxl_base.safetensors", "clip_l_sdxl_base.safetensors", "t5xxl.safetensors"]
    }
  ],
  "links": [
    [5, 11, 0, 6, 0, "CLIP"],
    [90, 67, 0, 68, 0, "CONDITIONING"],
    [91, 68, 0, 69, 0, "CONDITIONING"],
    [92, 70, 0, 69, 1, "CONDITIONING"],
    [93, 71, 0, 70, 0, "CONDITIONING"],
    [94, 11, 0, 71, 0, "CLIP"],
    [274, 8, 0, 50, 0, "IMAGE"],
    [445, 4, 0, 13, 0, "MODEL"],
    [568, 13, 0, 294, 0, "MODEL"],
    [569, 6, 0, 294, 1, "CONDITIONING"],
    [572, 294, 0, 8, 0, "LATENT"],
    [597, 71, 0, 67, 0, "CONDITIONING"],
    [598, 135, 0, 294, 3, "LATENT"],
    [604, 69, 0, 294, 2, "CONDITIONING"],
    [605, 4, 2, 8, 1, "VAE"]
  ],
  "groups": [],
  "config": {},
  "extra": {
    "ds": {
      "scale": 0.31384283767210064,
      "offset": [1701.063725248347, 1511.923032931671]
    }
  },
  "version": 0.4
}
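
For orientation, a small stand-alone sketch (standard library only) that prints the graph encoded above; the keys (`nodes`, `type`, `links`) come straight from this file:

```py
import json

with open("SD3.5L_example_workflow.json") as f:
    wf = json.load(f)

# Each node stores its ComfyUI class under "type"; each link is
# [link_id, src_node, src_slot, dst_node, dst_slot, data_type].
for node in wf["nodes"]:
    print(f'node {node["id"]}: {node["type"]}')
for link_id, src, src_slot, dst, dst_slot, dtype in wf["links"]:
    print(f"link {link_id}: {src}[{src_slot}] -> {dst}[{dst_slot}] ({dtype})")
```
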
mmdit.png
ADDED
model_index.json
ADDED
@@ -0,0 +1,40 @@
{
  "_class_name": "StableDiffusion3Pipeline",
  "_diffusers_version": "0.30.3.dev0",
  "scheduler": [
    "diffusers",
    "FlowMatchEulerDiscreteScheduler"
  ],
  "text_encoder": [
    "transformers",
    "CLIPTextModelWithProjection"
  ],
  "text_encoder_2": [
    "transformers",
    "CLIPTextModelWithProjection"
  ],
  "text_encoder_3": [
    "transformers",
    "T5EncoderModel"
  ],
  "tokenizer": [
    "transformers",
    "CLIPTokenizer"
  ],
  "tokenizer_2": [
    "transformers",
    "CLIPTokenizer"
  ],
  "tokenizer_3": [
    "transformers",
    "T5TokenizerFast"
  ],
  "transformer": [
    "diffusers",
    "SD3Transformer2DModel"
  ],
  "vae": [
    "diffusers",
    "AutoencoderKL"
  ]
}
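
model_index.json tells diffusers which library and class to instantiate for each subfolder when the pipeline is assembled. As a hedged sketch, individual components can also be loaded directly from their subfolders with the standard `from_pretrained(..., subfolder=...)` pattern:

```py
import torch
from transformers import CLIPTextModelWithProjection, T5EncoderModel

repo = "stabilityai/stable-diffusion-3.5-large"

# "text_encoder_2" maps to ["transformers", "CLIPTextModelWithProjection"] above.
clip_g = CLIPTextModelWithProjection.from_pretrained(
    repo, subfolder="text_encoder_2", torch_dtype=torch.float16
)
# "text_encoder_3" maps to ["transformers", "T5EncoderModel"] above.
t5 = T5EncoderModel.from_pretrained(
    repo, subfolder="text_encoder_3", torch_dtype=torch.float16
)
```
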
scheduler/scheduler_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_class_name": "FlowMatchEulerDiscreteScheduler",
  "_diffusers_version": "0.29.0.dev0",
  "num_train_timesteps": 1000,
  "shift": 3.0
}
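
The `shift: 3.0` value skews the flow-matching sigma schedule toward higher noise levels; it matches the value the ComfyUI workflow sets through its ModelSamplingSD3 node. A hedged sketch of instantiating the scheduler standalone from these values:

```py
from diffusers import FlowMatchEulerDiscreteScheduler

# Rebuild the scheduler from the values in scheduler_config.json.
sched = FlowMatchEulerDiscreteScheduler(num_train_timesteps=1000, shift=3.0)
sched.set_timesteps(num_inference_steps=28)
print(sched.timesteps[:5])  # shifted schedule, highest-noise timesteps first
```
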
sd3.5_large.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ffef7a279d9134626e6ce0d494fba84fc1c7e720b3c7df2d19a09dc3796d8f93
size 16460379262
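
This Git LFS pointer records the SHA-256 and byte size of the actual checkpoint, which can be used to verify a completed download; a minimal standard-library sketch:

```py
import hashlib
import os

EXPECTED_SHA256 = "ffef7a279d9134626e6ce0d494fba84fc1c7e720b3c7df2d19a09dc3796d8f93"
EXPECTED_SIZE = 16460379262

assert os.path.getsize("sd3.5_large.safetensors") == EXPECTED_SIZE
h = hashlib.sha256()
with open("sd3.5_large.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
```
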
sd3.5_large_demo.png
ADDED
text_encoder/config.json
ADDED
@@ -0,0 +1,24 @@
{
  "architectures": [
    "CLIPTextModelWithProjection"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "dropout": 0.0,
  "eos_token_id": 2,
  "hidden_act": "quick_gelu",
  "hidden_size": 768,
  "initializer_factor": 1.0,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 77,
  "model_type": "clip_text_model",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "projection_dim": 768,
  "torch_dtype": "float16",
  "transformers_version": "4.41.2",
  "vocab_size": 49408
}
text_encoder/model.fp16.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71e183d11db0c6b6282a4d9e0abb74125edc8692393e89ed8ee5571005f35cb1
size 247323896
text_encoder/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71e183d11db0c6b6282a4d9e0abb74125edc8692393e89ed8ee5571005f35cb1
size 247323896
text_encoder_2/config.json
ADDED
@@ -0,0 +1,24 @@
{
  "architectures": [
    "CLIPTextModelWithProjection"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "dropout": 0.0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_size": 1280,
  "initializer_factor": 1.0,
  "initializer_range": 0.02,
  "intermediate_size": 5120,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 77,
  "model_type": "clip_text_model",
  "num_attention_heads": 20,
  "num_hidden_layers": 32,
  "pad_token_id": 1,
  "projection_dim": 1280,
  "torch_dtype": "float16",
  "transformers_version": "4.41.2",
  "vocab_size": 49408
}
text_encoder_2/model.fp16.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4
size 1389382176
text_encoder_2/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4
size 1389382176
text_encoder_3/config.json
ADDED
@@ -0,0 +1,31 @@
{
  "architectures": [
    "T5EncoderModel"
  ],
  "classifier_dropout": 0.0,
  "d_ff": 10240,
  "d_kv": 64,
  "d_model": 4096,
  "decoder_start_token_id": 0,
  "dense_act_fn": "gelu_new",
  "dropout_rate": 0.1,
  "eos_token_id": 1,
  "feed_forward_proj": "gated-gelu",
  "initializer_factor": 1.0,
  "is_encoder_decoder": true,
  "is_gated_act": true,
  "layer_norm_epsilon": 1e-06,
  "model_type": "t5",
  "num_decoder_layers": 24,
  "num_heads": 64,
  "num_layers": 24,
  "output_past": true,
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.41.2",
  "use_cache": true,
  "vocab_size": 32128
}
text_encoder_3/model-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4f2751ceeb2a96edd693e539dc5d6bba0b8d3814f49a9b3798403a0cec4b2e3d
size 4994582104
text_encoder_3/model-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f63154532130422309532ff56f11945fbea8266c958e3133e8e5aef85c6293c7
size 4530066248
text_encoder_3/model.fp16-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4f2751ceeb2a96edd693e539dc5d6bba0b8d3814f49a9b3798403a0cec4b2e3d
size 4994582104
text_encoder_3/model.fp16-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f63154532130422309532ff56f11945fbea8266c958e3133e8e5aef85c6293c7
size 4530066248
text_encoder_3/model.safetensors.index.fp16.json
ADDED
@@ -0,0 +1,226 @@
{
  "metadata": {
    "total_size": 9524621312
  },
  "weight_map": {
    "encoder.block.0.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
90 |
+
"encoder.block.17.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
91 |
+
"encoder.block.17.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
92 |
+
"encoder.block.17.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
93 |
+
"encoder.block.17.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
94 |
+
"encoder.block.17.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
95 |
+
"encoder.block.17.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
96 |
+
"encoder.block.17.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
97 |
+
"encoder.block.18.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
|
98 |
+
"encoder.block.18.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
99 |
+
"encoder.block.18.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
100 |
+
"encoder.block.18.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
101 |
+
"encoder.block.18.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
102 |
+
"encoder.block.18.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
103 |
+
"encoder.block.18.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
104 |
+
"encoder.block.18.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
105 |
+
"encoder.block.18.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
106 |
+
"encoder.block.19.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
|
107 |
+
"encoder.block.19.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
108 |
+
"encoder.block.19.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
109 |
+
"encoder.block.19.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
110 |
+
"encoder.block.19.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
111 |
+
"encoder.block.19.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
112 |
+
"encoder.block.19.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
113 |
+
"encoder.block.19.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
114 |
+
"encoder.block.19.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
115 |
+
"encoder.block.2.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
116 |
+
"encoder.block.2.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
117 |
+
"encoder.block.2.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
118 |
+
"encoder.block.2.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
119 |
+
"encoder.block.2.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
120 |
+
"encoder.block.2.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
121 |
+
"encoder.block.2.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
122 |
+
"encoder.block.2.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
123 |
+
"encoder.block.2.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
124 |
+
"encoder.block.20.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
|
125 |
+
"encoder.block.20.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
126 |
+
"encoder.block.20.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
127 |
+
"encoder.block.20.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
128 |
+
"encoder.block.20.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
129 |
+
"encoder.block.20.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
130 |
+
"encoder.block.20.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
131 |
+
"encoder.block.20.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
132 |
+
"encoder.block.20.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
133 |
+
"encoder.block.21.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
|
134 |
+
"encoder.block.21.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
135 |
+
"encoder.block.21.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
136 |
+
"encoder.block.21.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
137 |
+
"encoder.block.21.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
138 |
+
"encoder.block.21.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
139 |
+
"encoder.block.21.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
140 |
+
"encoder.block.21.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
141 |
+
"encoder.block.21.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
142 |
+
"encoder.block.22.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
|
143 |
+
"encoder.block.22.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
144 |
+
"encoder.block.22.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
145 |
+
"encoder.block.22.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
146 |
+
"encoder.block.22.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
147 |
+
"encoder.block.22.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
148 |
+
"encoder.block.22.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
149 |
+
"encoder.block.22.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
150 |
+
"encoder.block.22.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
151 |
+
"encoder.block.23.layer.0.SelfAttention.k.weight": "model.fp16-00002-of-00002.safetensors",
|
152 |
+
"encoder.block.23.layer.0.SelfAttention.o.weight": "model.fp16-00002-of-00002.safetensors",
|
153 |
+
"encoder.block.23.layer.0.SelfAttention.q.weight": "model.fp16-00002-of-00002.safetensors",
|
154 |
+
"encoder.block.23.layer.0.SelfAttention.v.weight": "model.fp16-00002-of-00002.safetensors",
|
155 |
+
"encoder.block.23.layer.0.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
156 |
+
"encoder.block.23.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00002-of-00002.safetensors",
|
157 |
+
"encoder.block.23.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00002-of-00002.safetensors",
|
158 |
+
"encoder.block.23.layer.1.DenseReluDense.wo.weight": "model.fp16-00002-of-00002.safetensors",
|
159 |
+
"encoder.block.23.layer.1.layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
160 |
+
"encoder.block.3.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
161 |
+
"encoder.block.3.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
162 |
+
"encoder.block.3.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
163 |
+
"encoder.block.3.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
164 |
+
"encoder.block.3.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
165 |
+
"encoder.block.3.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
166 |
+
"encoder.block.3.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
167 |
+
"encoder.block.3.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
168 |
+
"encoder.block.3.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
169 |
+
"encoder.block.4.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
170 |
+
"encoder.block.4.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
171 |
+
"encoder.block.4.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
172 |
+
"encoder.block.4.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
173 |
+
"encoder.block.4.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
174 |
+
"encoder.block.4.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
175 |
+
"encoder.block.4.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
176 |
+
"encoder.block.4.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
177 |
+
"encoder.block.4.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
178 |
+
"encoder.block.5.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
179 |
+
"encoder.block.5.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
180 |
+
"encoder.block.5.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
181 |
+
"encoder.block.5.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
182 |
+
"encoder.block.5.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
183 |
+
"encoder.block.5.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
184 |
+
"encoder.block.5.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
185 |
+
"encoder.block.5.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
186 |
+
"encoder.block.5.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
187 |
+
"encoder.block.6.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
188 |
+
"encoder.block.6.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
189 |
+
"encoder.block.6.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
190 |
+
"encoder.block.6.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
191 |
+
"encoder.block.6.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
192 |
+
"encoder.block.6.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
193 |
+
"encoder.block.6.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
194 |
+
"encoder.block.6.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
195 |
+
"encoder.block.6.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
196 |
+
"encoder.block.7.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
197 |
+
"encoder.block.7.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
198 |
+
"encoder.block.7.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
199 |
+
"encoder.block.7.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
200 |
+
"encoder.block.7.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
201 |
+
"encoder.block.7.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
202 |
+
"encoder.block.7.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
203 |
+
"encoder.block.7.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
204 |
+
"encoder.block.7.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
205 |
+
"encoder.block.8.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
206 |
+
"encoder.block.8.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
207 |
+
"encoder.block.8.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
208 |
+
"encoder.block.8.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
209 |
+
"encoder.block.8.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
210 |
+
"encoder.block.8.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
211 |
+
"encoder.block.8.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
212 |
+
"encoder.block.8.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
213 |
+
"encoder.block.8.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
214 |
+
"encoder.block.9.layer.0.SelfAttention.k.weight": "model.fp16-00001-of-00002.safetensors",
|
215 |
+
"encoder.block.9.layer.0.SelfAttention.o.weight": "model.fp16-00001-of-00002.safetensors",
|
216 |
+
"encoder.block.9.layer.0.SelfAttention.q.weight": "model.fp16-00001-of-00002.safetensors",
|
217 |
+
"encoder.block.9.layer.0.SelfAttention.v.weight": "model.fp16-00001-of-00002.safetensors",
|
218 |
+
"encoder.block.9.layer.0.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
219 |
+
"encoder.block.9.layer.1.DenseReluDense.wi_0.weight": "model.fp16-00001-of-00002.safetensors",
|
220 |
+
"encoder.block.9.layer.1.DenseReluDense.wi_1.weight": "model.fp16-00001-of-00002.safetensors",
|
221 |
+
"encoder.block.9.layer.1.DenseReluDense.wo.weight": "model.fp16-00001-of-00002.safetensors",
|
222 |
+
"encoder.block.9.layer.1.layer_norm.weight": "model.fp16-00001-of-00002.safetensors",
|
223 |
+
"encoder.final_layer_norm.weight": "model.fp16-00002-of-00002.safetensors",
|
224 |
+
"shared.weight": "model.fp16-00001-of-00002.safetensors"
|
225 |
+
}
|
226 |
+
}
|
text_encoder_3/model.safetensors.index.json
ADDED
@@ -0,0 +1,226 @@
{
  "metadata": {
    "total_size": 9524621312
  },
  "weight_map": {
    "encoder.block.0.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.0.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.1.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.10.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.11.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.12.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.12.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.13.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.14.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.15.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.16.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.17.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.18.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.19.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.2.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.2.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.20.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.20.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.21.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.22.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.0.SelfAttention.k.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.0.SelfAttention.o.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.0.SelfAttention.q.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.0.SelfAttention.v.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.0.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.1.DenseReluDense.wi_0.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.1.DenseReluDense.wi_1.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.1.DenseReluDense.wo.weight": "model-00002-of-00002.safetensors",
    "encoder.block.23.layer.1.layer_norm.weight": "model-00002-of-00002.safetensors",
    "encoder.block.3.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.3.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.4.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.5.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.6.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.7.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.8.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.0.SelfAttention.k.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.0.SelfAttention.o.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.0.SelfAttention.q.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.0.SelfAttention.v.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.0.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.1.DenseReluDense.wi_0.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.1.DenseReluDense.wi_1.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.1.DenseReluDense.wo.weight": "model-00001-of-00002.safetensors",
    "encoder.block.9.layer.1.layer_norm.weight": "model-00001-of-00002.safetensors",
    "encoder.final_layer_norm.weight": "model-00002-of-00002.safetensors",
    "shared.weight": "model-00001-of-00002.safetensors"
  }
}
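The two index files above share one structure. As a minimal sketch (not part of this commit, filenames taken from the index shown above), this is how a loader typically consumes such an index: "weight_map" tells it which shard file holds each tensor.

```python
# Sketch: group the tensors of a sharded safetensors checkpoint by shard,
# using only the index JSON reproduced above.
import json
from collections import defaultdict

with open("text_encoder_3/model.safetensors.index.json") as f:
    index = json.load(f)

tensors_per_shard = defaultdict(list)
for tensor_name, shard_file in index["weight_map"].items():
    tensors_per_shard[shard_file].append(tensor_name)

for shard_file, names in sorted(tensors_per_shard.items()):
    print(f"{shard_file}: {len(names)} tensors")
print("declared total size:", index["metadata"]["total_size"], "bytes")
```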
text_encoders/README.md
ADDED
@@ -0,0 +1,11 @@
CLIP-ViT/L:
* [https://huggingface.co/openai/clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14/blob/main/README.md)
* [MIT License](https://github.com/openai/CLIP/blob/main/LICENSE)

OpenCLIP-ViT/G:
* [https://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k](https://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k/blob/main/README.md)
* [MIT License](https://choosealicense.com/licenses/mit)

T5 Version 1.1:
* [https://huggingface.co/google/t5-v1_1-xxl](https://huggingface.co/google/t5-v1_1-xxl/blob/main/README.md)
* [Apache License 2.0](https://choosealicense.com/licenses/apache-2.0)
text_encoders/clip_g.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4
size 1389382176
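The safetensors entries in this commit are git-lfs pointer files: the pointer records only the blob's SHA-256 ("oid") and byte size. A hedged sketch of verifying a downloaded copy against the pointer above (the local path is an assumption for illustration):

```python
# Sketch: check a locally downloaded weight file against its LFS pointer.
import hashlib
import os

path = "text_encoders/clip_g.safetensors"  # assumed local download path
expected_oid = "ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4"
expected_size = 1389382176

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("pointer verified")
```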
text_encoders/clip_l.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:660c6f5b1abae9dc498ac2d21e1347d2abdb0cf6c0c0c8576cd796491d9a6cdd
size 246144152
text_encoders/t5xxl_fp16.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6e480b09fae049a72d2a8c5fbccb8d3e92febeb233bbe9dfe7256958a9167635
size 9787841024
text_encoders/t5xxl_fp8_e4m3fn.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7d330da4816157540d6bb7838bf63a0f02f573fc48ca4d8de34bb0cbfd514f09
size 4893934904
tokenizer/merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer/special_tokens_map.json
ADDED
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer/tokenizer_config.json
ADDED
@@ -0,0 +1,30 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "49406": {
      "content": "<|startoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49407": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|startoftext|>",
  "clean_up_tokenization_spaces": true,
  "do_lower_case": true,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 77,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "CLIPTokenizer",
  "unk_token": "<|endoftext|>"
}
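A short sketch (assumes the `transformers` library and a working directory at the repo root): the config above sets `model_max_length` to 77, so prompts for this encoder are padded or truncated to CLIP's 77-token window.

```python
# Sketch: load the first CLIP tokenizer from its folder and pad a prompt
# to the 77-token window declared in tokenizer_config.json above.
from transformers import CLIPTokenizer

tok = CLIPTokenizer.from_pretrained("tokenizer")  # repo root assumed as cwd
out = tok("a photo of an astronaut riding a horse",
          padding="max_length", max_length=77, truncation=True)
print(len(out["input_ids"]))  # expected: 77
```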
tokenizer/vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_2/merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_2/special_tokens_map.json
ADDED
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "!",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer_2/tokenizer_config.json
ADDED
@@ -0,0 +1,38 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "!",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49406": {
      "content": "<|startoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49407": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|startoftext|>",
  "clean_up_tokenization_spaces": true,
  "do_lower_case": true,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 77,
  "pad_token": "!",
  "tokenizer_class": "CLIPTokenizer",
  "unk_token": "<|endoftext|>"
}
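The one functional difference from `tokenizer/` is the padding token: this second CLIP tokenizer pads with "!" (token id 0) rather than `<|endoftext|>`, presumably matching how the OpenCLIP-ViT/G encoder expects padded inputs. A hedged sketch (assumes `transformers`, repo root as working directory):

```python
# Sketch: confirm the second tokenizer's pad token differs from the first.
from transformers import CLIPTokenizer

tok2 = CLIPTokenizer.from_pretrained("tokenizer_2")
print(tok2.pad_token, tok2.pad_token_id)  # expected: ! 0
```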
tokenizer_2/vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_3/special_tokens_map.json
ADDED
@@ -0,0 +1,125 @@
{
  "additional_special_tokens": [
    "<extra_id_0>",
    "<extra_id_1>",
    "<extra_id_2>",
    "<extra_id_3>",
    "<extra_id_4>",
    "<extra_id_5>",
    "<extra_id_6>",
    "<extra_id_7>",
    "<extra_id_8>",
    "<extra_id_9>",
    "<extra_id_10>",
    "<extra_id_11>",
    "<extra_id_12>",
    "<extra_id_13>",
    "<extra_id_14>",
    "<extra_id_15>",
    "<extra_id_16>",
    "<extra_id_17>",
    "<extra_id_18>",
    "<extra_id_19>",
    "<extra_id_20>",
    "<extra_id_21>",
    "<extra_id_22>",
    "<extra_id_23>",
    "<extra_id_24>",
    "<extra_id_25>",
    "<extra_id_26>",
    "<extra_id_27>",
    "<extra_id_28>",
    "<extra_id_29>",
    "<extra_id_30>",
    "<extra_id_31>",
    "<extra_id_32>",
    "<extra_id_33>",
    "<extra_id_34>",
    "<extra_id_35>",
    "<extra_id_36>",
    "<extra_id_37>",
    "<extra_id_38>",
    "<extra_id_39>",
    "<extra_id_40>",
    "<extra_id_41>",
    "<extra_id_42>",
    "<extra_id_43>",
    "<extra_id_44>",
    "<extra_id_45>",
    "<extra_id_46>",
    "<extra_id_47>",
    "<extra_id_48>",
    "<extra_id_49>",
    "<extra_id_50>",
    "<extra_id_51>",
    "<extra_id_52>",
    "<extra_id_53>",
    "<extra_id_54>",
    "<extra_id_55>",
    "<extra_id_56>",
    "<extra_id_57>",
    "<extra_id_58>",
    "<extra_id_59>",
    "<extra_id_60>",
    "<extra_id_61>",
    "<extra_id_62>",
    "<extra_id_63>",
    "<extra_id_64>",
    "<extra_id_65>",
    "<extra_id_66>",
    "<extra_id_67>",
    "<extra_id_68>",
    "<extra_id_69>",
    "<extra_id_70>",
    "<extra_id_71>",
    "<extra_id_72>",
    "<extra_id_73>",
    "<extra_id_74>",
    "<extra_id_75>",
    "<extra_id_76>",
    "<extra_id_77>",
    "<extra_id_78>",
    "<extra_id_79>",
    "<extra_id_80>",
    "<extra_id_81>",
    "<extra_id_82>",
    "<extra_id_83>",
    "<extra_id_84>",
    "<extra_id_85>",
    "<extra_id_86>",
    "<extra_id_87>",
    "<extra_id_88>",
    "<extra_id_89>",
    "<extra_id_90>",
    "<extra_id_91>",
    "<extra_id_92>",
    "<extra_id_93>",
    "<extra_id_94>",
    "<extra_id_95>",
    "<extra_id_96>",
    "<extra_id_97>",
    "<extra_id_98>",
    "<extra_id_99>"
  ],
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
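The 100 `<extra_id_N>` entries above are T5's span-corruption sentinel tokens; as the `tokenizer_3/tokenizer_config.json` later in this diff shows, their ids run in reverse order from 32000. A hedged sketch (assumes `transformers` with `sentencepiece` installed, repo root as working directory):

```python
# Sketch: the sentinel ids are assigned in reverse, so <extra_id_99> is
# 32000 and <extra_id_0> is 32099.
from transformers import T5Tokenizer

tok3 = T5Tokenizer.from_pretrained("tokenizer_3")
print(tok3.convert_tokens_to_ids("<extra_id_99>"))  # expected: 32000
print(tok3.convert_tokens_to_ids("<extra_id_0>"))   # expected: 32099
```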
tokenizer_3/spiece.model
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
size 791656
tokenizer_3/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_3/tokenizer_config.json
ADDED
@@ -0,0 +1,940 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
+ {
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {"content": "<pad>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+     "1": {"content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+     "2": {"content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+     "32000": {"content": "<extra_id_99>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32001": {"content": "<extra_id_98>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32002": {"content": "<extra_id_97>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32003": {"content": "<extra_id_96>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32004": {"content": "<extra_id_95>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32005": {"content": "<extra_id_94>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32006": {"content": "<extra_id_93>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32007": {"content": "<extra_id_92>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32008": {"content": "<extra_id_91>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32009": {"content": "<extra_id_90>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32010": {"content": "<extra_id_89>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32011": {"content": "<extra_id_88>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32012": {"content": "<extra_id_87>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32013": {"content": "<extra_id_86>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32014": {"content": "<extra_id_85>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32015": {"content": "<extra_id_84>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32016": {"content": "<extra_id_83>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32017": {"content": "<extra_id_82>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32018": {"content": "<extra_id_81>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32019": {"content": "<extra_id_80>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32020": {"content": "<extra_id_79>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32021": {"content": "<extra_id_78>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32022": {"content": "<extra_id_77>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32023": {"content": "<extra_id_76>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32024": {"content": "<extra_id_75>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32025": {"content": "<extra_id_74>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32026": {"content": "<extra_id_73>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32027": {"content": "<extra_id_72>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32028": {"content": "<extra_id_71>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32029": {"content": "<extra_id_70>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32030": {"content": "<extra_id_69>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32031": {"content": "<extra_id_68>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32032": {"content": "<extra_id_67>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32033": {"content": "<extra_id_66>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32034": {"content": "<extra_id_65>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32035": {"content": "<extra_id_64>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32036": {"content": "<extra_id_63>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32037": {"content": "<extra_id_62>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32038": {"content": "<extra_id_61>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32039": {"content": "<extra_id_60>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32040": {"content": "<extra_id_59>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32041": {"content": "<extra_id_58>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32042": {"content": "<extra_id_57>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32043": {"content": "<extra_id_56>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32044": {"content": "<extra_id_55>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32045": {"content": "<extra_id_54>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32046": {"content": "<extra_id_53>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32047": {"content": "<extra_id_52>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32048": {"content": "<extra_id_51>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32049": {"content": "<extra_id_50>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32050": {"content": "<extra_id_49>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32051": {"content": "<extra_id_48>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32052": {"content": "<extra_id_47>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32053": {"content": "<extra_id_46>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32054": {"content": "<extra_id_45>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32055": {"content": "<extra_id_44>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32056": {"content": "<extra_id_43>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32057": {"content": "<extra_id_42>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32058": {"content": "<extra_id_41>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32059": {"content": "<extra_id_40>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32060": {"content": "<extra_id_39>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32061": {"content": "<extra_id_38>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32062": {"content": "<extra_id_37>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32063": {"content": "<extra_id_36>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32064": {"content": "<extra_id_35>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32065": {"content": "<extra_id_34>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32066": {"content": "<extra_id_33>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32067": {"content": "<extra_id_32>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32068": {"content": "<extra_id_31>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32069": {"content": "<extra_id_30>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32070": {"content": "<extra_id_29>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32071": {"content": "<extra_id_28>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32072": {"content": "<extra_id_27>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32073": {"content": "<extra_id_26>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32074": {"content": "<extra_id_25>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32075": {"content": "<extra_id_24>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32076": {"content": "<extra_id_23>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32077": {"content": "<extra_id_22>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32078": {"content": "<extra_id_21>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32079": {"content": "<extra_id_20>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32080": {"content": "<extra_id_19>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32081": {"content": "<extra_id_18>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32082": {"content": "<extra_id_17>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32083": {"content": "<extra_id_16>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32084": {"content": "<extra_id_15>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32085": {"content": "<extra_id_14>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32086": {"content": "<extra_id_13>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32087": {"content": "<extra_id_12>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32088": {"content": "<extra_id_11>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32089": {"content": "<extra_id_10>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32090": {"content": "<extra_id_9>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32091": {"content": "<extra_id_8>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32092": {"content": "<extra_id_7>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32093": {"content": "<extra_id_6>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32094": {"content": "<extra_id_5>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32095": {"content": "<extra_id_4>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32096": {"content": "<extra_id_3>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32097": {"content": "<extra_id_2>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32098": {"content": "<extra_id_1>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "32099": {"content": "<extra_id_0>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true}
+   },
+   "additional_special_tokens": [
+     "<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>",
+     "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>",
+     "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>",
+     "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>",
+     "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>",
+     "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>",
+     "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>",
+     "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>",
+     "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>",
+     "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>",
+     "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>",
+     "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>",
+     "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>",
+     "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>",
+     "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>",
+     "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>",
+     "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>",
+     "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>",
+     "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>",
+     "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"
+   ],
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "</s>",
+   "extra_ids": 100,
+   "legacy": true,
+   "model_max_length": 512,
+   "pad_token": "<pad>",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "T5Tokenizer",
+   "unk_token": "<unk>"
+ }
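The config above declares a standard T5 tokenizer (SentencePiece model in tokenizer_3/spiece.model) with 100 sentinel <extra_id_*> tokens and a 512-token context for the T5-XXL text encoder. A minimal sketch of loading and inspecting it, assuming `transformers` and `sentencepiece` are installed; the repo id is an assumption (any local clone of this repo works too):

# Minimal sketch: load tokenizer_3 and check the values declared above.
from transformers import T5Tokenizer

tok = T5Tokenizer.from_pretrained(
    "stabilityai/stable-diffusion-3.5-large", subfolder="tokenizer_3"
)
print(tok.model_max_length)                       # 512, per "model_max_length"
print(tok.convert_tokens_to_ids("<extra_id_0>"))  # 32099, per added_tokens_decoder
print(tok("a photo of a cat").input_ids[-1])      # 1, the </s> eos_token id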
transformer/config.json
ADDED
@@ -0,0 +1,16 @@
+ {
+   "_class_name": "SD3Transformer2DModel",
+   "_diffusers_version": "0.31.0.dev0",
+   "attention_head_dim": 64,
+   "caption_projection_dim": 2432,
+   "in_channels": 16,
+   "joint_attention_dim": 4096,
+   "num_attention_heads": 38,
+   "num_layers": 38,
+   "out_channels": 16,
+   "patch_size": 2,
+   "pooled_projection_dim": 2048,
+   "pos_embed_max_size": 192,
+   "qk_norm": "rms_norm",
+   "sample_size": 128
+ }
transformer/diffusion_pytorch_model-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ec9c4af8093417fded1eec8638f313f5671c7dd0a17a3bf64807bee9954a3e0
+ size 9985185992
transformer/diffusion_pytorch_model-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a84bad7103aa22d8ddf6baafc7664868b1edcd316ca6dd9c7e1f667345bd99db
+ size 6307519304
transformer/diffusion_pytorch_model.safetensors.index.json
ADDED
The diff for this file is too large to render.
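The three-line .safetensors entries above (and below) are Git LFS pointers, not the weights themselves; each records the SHA-256 digest and byte size of the real file. A downloaded shard can be checked against its pointer with a short sketch like the following (the local path is an assumption):

# Minimal sketch: verify a downloaded shard against its LFS pointer.
import hashlib, os

path = "transformer/diffusion_pytorch_model-00001-of-00002.safetensors"
oid = "8ec9c4af8093417fded1eec8638f313f5671c7dd0a17a3bf64807bee9954a3e0"
size = 9985185992

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == size
assert h.hexdigest() == oid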
vae/config.json
ADDED
@@ -0,0 +1,38 @@
+ {
+   "_class_name": "AutoencoderKL",
+   "_diffusers_version": "0.31.0.dev0",
+   "_name_or_path": "../sdxl-vae/",
+   "act_fn": "silu",
+   "block_out_channels": [128, 256, 512, 512],
+   "down_block_types": ["DownEncoderBlock2D", "DownEncoderBlock2D", "DownEncoderBlock2D", "DownEncoderBlock2D"],
+   "force_upcast": true,
+   "in_channels": 3,
+   "latent_channels": 16,
+   "latents_mean": null,
+   "latents_std": null,
+   "layers_per_block": 2,
+   "mid_block_add_attention": true,
+   "norm_num_groups": 32,
+   "out_channels": 3,
+   "sample_size": 1024,
+   "scaling_factor": 1.5305,
+   "shift_factor": 0.0609,
+   "up_block_types": ["UpDecoderBlock2D", "UpDecoderBlock2D", "UpDecoderBlock2D", "UpDecoderBlock2D"],
+   "use_post_quant_conv": false,
+   "use_quant_conv": false
+ }
vae/diffusion_pytorch_model.safetensors
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f53304a79335b55e13ec50f63e5157fee4deb2f30d5fae0654e2b2653c109dc
+ size 167666902
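Taken together, model_index.json wires the text encoders, tokenizers, transformer, scheduler, and VAE above into one pipeline. A minimal end-to-end sketch, assuming `diffusers` >= 0.31, a CUDA device with enough memory, and the public repo id; the prompt and sampler settings are illustrative:

# Minimal sketch: run all of the components in this commit as one pipeline.
import torch
from diffusers import StableDiffusion3Pipeline

pipe = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3.5-large", torch_dtype=torch.bfloat16
).to("cuda")

image = pipe(
    "a photo of a cat holding a sign that says hello world",
    num_inference_steps=28,
    guidance_scale=3.5,
).images[0]
image.save("cat.png")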