junnyu committed on
Commit 441d2c3
1 Parent(s): 13af955

Update pipeline.py

Files changed (1)
  1. pipeline.py +3 -2
pipeline.py CHANGED
@@ -600,6 +600,7 @@ class WebUIStableDiffusionControlNetPipeline(DiffusionPipeline):
         cross_attention_kwargs: Optional[Dict[str, Any]] = None,
         clip_skip: int = 1,
         controlnet_conditioning_scale: Union[float, List[float]] = 1.0,
+        enable_lora: bool = True,
     ):
         r"""
         Function invoked when calling the pipeline for generation.
@@ -705,7 +706,7 @@ class WebUIStableDiffusionControlNetPipeline(DiffusionPipeline):
 
         prompts, extra_network_data = parse_prompts([prompt])
 
-        if self.LORA_DIR is not None:
+        if enable_lora and self.LORA_DIR is not None:
             if os.path.exists(self.LORA_DIR):
                 lora_mapping = {p.stem: p.absolute() for p in Path(self.LORA_DIR).glob("*.safetensors")}
                 for params in extra_network_data["lora"]:
@@ -864,7 +865,7 @@ class WebUIStableDiffusionControlNetPipeline(DiffusionPipeline):
         except Exception as e:
             raise ValueError(e)
         finally:
-            if self.weights_has_changed:
+            if enable_lora and self.weights_has_changed:
                 for sub_layer in self.text_encoder.sublayers(include_self=True):
                     if hasattr(sub_layer, "backup_weights"):
                         sub_layer.weight.copy_(sub_layer.backup_weights, True)
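
The commit guards both sides of the LoRA lifecycle behind the same flag: the weight patch (scanning LORA_DIR for *.safetensors) and the finally-block restore from backup_weights. Keeping the two gates symmetric means a call with enable_lora=False never touches weights and never attempts a restore. A minimal self-contained sketch of that pattern follows; TinyPipeline and all of its attributes are illustrative stand-ins, not this repo's actual API.

class TinyPipeline:
    def __init__(self):
        self.weight = [1.0, 2.0]
        self.weights_has_changed = False
        self.backup_weights = None

    def __call__(self, prompt, enable_lora=True):
        try:
            if enable_lora:
                # Patch weights for this call, keeping a backup to restore from.
                self.backup_weights = list(self.weight)
                self.weight = [w * 0.8 for w in self.weight]
                self.weights_has_changed = True
            return f"generated({prompt!r}, weight={self.weight})"
        finally:
            # Mirrors the commit: restore only when LoRA was enabled and
            # weights were actually patched during this call.
            if enable_lora and self.weights_has_changed:
                self.weight = self.backup_weights
                self.weights_has_changed = False


pipe = TinyPipeline()
print(pipe("a prompt", enable_lora=True))   # patched weights used, then restored
print(pipe("a prompt", enable_lora=False))  # weights left untouched

Against the real pipeline, the new keyword is simply passed at call time (enable_lora=True remains the default, so existing callers are unaffected), and <lora:name:weight> tags in the prompt are only resolved against LORA_DIR when it is set.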