multimodalart (HF staff) committed
Commit 4e7780d
1 Parent(s): 257e1a1

Update app.py

Files changed (1):
  app.py  +1 -15
app.py CHANGED
@@ -12,32 +12,18 @@ import spaces
 from huggingface_hub import hf_hub_download
 from optimum.quanto import freeze, qfloat8, quantize
 
-# Load fp8
-#transformer = FluxTransformer2DModel.from_single_file("https://huggingface.co/Kijai/flux-fp8/blob/main/flux1-dev-fp8.safetensors", torch_dtype=torch.bfloat16)
-#quantize(transformer, weights=qfloat8)
-#freeze(transformer)
-
-# Load models
-#controlnet = FluxControlNetModel.from_pretrained("alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Alpha", torch_dtype=torch.bfloat16)
-#quantize(controlnet, weights=qfloat8)
-#freeze(controlnet)
+
 controlnet = FluxControlNetModel.from_pretrained("alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Alpha", torch_dtype=torch.bfloat16)
 transformer = FluxTransformer2DModel.from_pretrained(
     "black-forest-labs/FLUX.1-dev", subfolder='transformer', torch_dtype=torch.bfloat16
 )
 
-text_encoder_2 = T5EncoderModel.from_pretrained("black-forest-labs/FLUX.1-dev", subfolder="text_encoder_2", torch_dtype=torch.bfloat16)
-quantize(text_encoder_2, weights=qfloat8)
-freeze(text_encoder_2)
-
 pipe = FluxControlNetInpaintingPipeline.from_pretrained(
     "black-forest-labs/FLUX.1-dev",
-    text_encoder_2=None,
     transformer=transformer,
     controlnet=controlnet,
     torch_dtype=torch.bfloat16
 )
-pipe.text_encoder_2 = text_encoder_2
 
 repo_name = "ByteDance/Hyper-SD"
 ckpt_name = "Hyper-FLUX.1-dev-8steps-lora.safetensors"
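
For context, the path removed here quantized the FLUX.1-dev T5 text encoder to fp8 with optimum.quanto and attached it to the pipeline after construction. A minimal sketch of that removed pattern, reconstructed from the deleted lines rather than taken from the live app.py, looks like this:

```python
import torch
from transformers import T5EncoderModel
from optimum.quanto import freeze, qfloat8, quantize

# Removed fp8 path: load the T5 encoder, quantize its weights to float8,
# then freeze them so the quantized weights replace the originals.
text_encoder_2 = T5EncoderModel.from_pretrained(
    "black-forest-labs/FLUX.1-dev", subfolder="text_encoder_2", torch_dtype=torch.bfloat16
)
quantize(text_encoder_2, weights=qfloat8)
freeze(text_encoder_2)

# The pipeline was then created with text_encoder_2=None and the quantized
# encoder attached afterwards:
#   pipe = FluxControlNetInpaintingPipeline.from_pretrained(..., text_encoder_2=None, ...)
#   pipe.text_encoder_2 = text_encoder_2
```

After this commit, text_encoder_2 is simply loaded in bfloat16 as part of FluxControlNetInpaintingPipeline.from_pretrained, trading higher memory use for a simpler setup.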
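The hunk ends right after repo_name and ckpt_name are defined for the ByteDance/Hyper-SD 8-step LoRA; the code that consumes them is outside this diff. A typical diffusers pattern for such a checkpoint, shown only as an assumed continuation rather than code from this commit, would be:

```python
from huggingface_hub import hf_hub_download

repo_name = "ByteDance/Hyper-SD"
ckpt_name = "Hyper-FLUX.1-dev-8steps-lora.safetensors"

# Assumed continuation: download the LoRA file and attach it to the pipeline.
lora_path = hf_hub_download(repo_name, ckpt_name)
pipe.load_lora_weights(lora_path)   # diffusers LoRA loader (assumes the pipeline exposes it)
pipe.fuse_lora(lora_scale=0.125)    # scale commonly used with Hyper-FLUX 8-step LoRAs
pipe.to("cuda")
```

The lora_scale value and the load_lora_weights/fuse_lora calls are assumptions about how the Space wires in the Hyper-SD LoRA, not part of this diff.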
 