Update README.md
README.md CHANGED
@@ -9,10 +9,6 @@ datasets:
 This is the example model of [Distill SDXL](https://github.com/okotaku/diffengine/tree/main/configs/distill_sd).
 The training is based on [DiffEngine](https://github.com/okotaku/diffengine), the open-source toolbox for training state-of-the-art Diffusion Models with diffusers and mmengine.
 
-Paper: [On Architectural Compression of Text-to-Image Diffusion Models](https://arxiv.org/abs/2305.15798)
-
-Unofficial implementation: https://github.com/segmind/distill-sd
-
 # Training
 
 ```
@@ -34,7 +30,7 @@ import torch
 from diffusers import DiffusionPipeline, UNet2DConditionModel, AutoencoderKL
 
 checkpoint = 'takuoko/tiny_sd_xl_pokemon_blip'
-prompt = 'a
+prompt = 'a very cute looking pokemon with a hat on its head'
 
 unet = UNet2DConditionModel.from_pretrained(
     checkpoint, torch_dtype=torch.bfloat16
@@ -57,6 +53,12 @@ image.save('demo.png')
 
 # Example result
 
-prompt = 'a
+prompt = 'a very cute looking pokemon with a hat on its head'
+
+![image](demo.png)
 
-
+# Reference
+
+Paper: [On Architectural Compression of Text-to-Image Diffusion Models](https://arxiv.org/abs/2305.15798)
+
+Unofficial implementation: https://github.com/segmind/distill-sd
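
The hunks above show only fragments of the card's inference snippet (the imports, checkpoint, prompt, UNet load, and the `image.save('demo.png')` call in the last hunk header). Below is a minimal end-to-end sketch of how the distilled UNet plausibly plugs into an SDXL pipeline after this commit; the base model id (`stabilityai/stable-diffusion-xl-base-1.0`), the fp16-fix VAE, and the sampling settings are assumptions, not lines taken from this diff:

```python
import torch
from diffusers import DiffusionPipeline, UNet2DConditionModel, AutoencoderKL

checkpoint = 'takuoko/tiny_sd_xl_pokemon_blip'
prompt = 'a very cute looking pokemon with a hat on its head'

# Distilled (block-pruned) UNet from this repo, loaded in bfloat16
# exactly as in the card's snippet.
unet = UNet2DConditionModel.from_pretrained(
    checkpoint, torch_dtype=torch.bfloat16
)

# Assumption: the card imports AutoencoderKL but the diff does not show
# which VAE it loads; the fp16-fix SDXL VAE is a common choice.
vae = AutoencoderKL.from_pretrained(
    'madebyollin/sdxl-vae-fp16-fix', torch_dtype=torch.bfloat16
)

# Assumption: text encoders, tokenizers, and scheduler come from the
# stock SDXL base model, with only the UNet and VAE swapped in.
pipe = DiffusionPipeline.from_pretrained(
    'stabilityai/stable-diffusion-xl-base-1.0',
    unet=unet,
    vae=vae,
    torch_dtype=torch.bfloat16,
)
pipe.to('cuda')

image = pipe(prompt, num_inference_steps=50).images[0]
image.save('demo.png')  # matches the save call shown in the last hunk header
```

Loading only the UNet from this checkpoint while reusing the rest of stock SDXL is consistent with the referenced paper, whose architectural compression removes blocks from the denoising U-Net and distills it from the teacher, leaving the text encoders and VAE untouched.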