adamelliotfields committed on
Commit
1c11426
·
verified ·
1 Parent(s): 0acf94b

Disable loading progress bar

Browse files
Files changed (3) hide show
  1. app.py +4 -0
  2. lib/inference.py +3 -1
  3. lib/loader.py +4 -0
app.py CHANGED
@@ -12,8 +12,12 @@ from lib import Config, async_call, download_civit_file, download_repo_files, ge
12
 
13
  filterwarnings("ignore", category=FutureWarning, module="diffusers")
14
  filterwarnings("ignore", category=FutureWarning, module="transformers")
 
15
  diffusers_logging.set_verbosity_error()
 
 
16
  transformers_logging.set_verbosity_error()
 
17
 
18
  # the CSS `content` attribute expects a string so we need to wrap the number in quotes
19
  refresh_seed_js = """
 
12
 
13
  filterwarnings("ignore", category=FutureWarning, module="diffusers")
14
  filterwarnings("ignore", category=FutureWarning, module="transformers")
15
+
16
  diffusers_logging.set_verbosity_error()
17
+ diffusers_logging.disable_progress_bar()
18
+
19
  transformers_logging.set_verbosity_error()
20
+ transformers_logging.disable_progress_bar()
21
 
22
  # the CSS `content` attribute expects a string so we need to wrap the number in quotes
23
  refresh_seed_js = """
lib/inference.py CHANGED
@@ -169,6 +169,8 @@ def generate(
169
 
170
  start = time.perf_counter()
171
  log = Logger("generate")
 
 
172
  loader = Loader()
173
  loader.load(
174
  KIND,
@@ -314,7 +316,7 @@ def generate(
314
  CURRENT_IMAGE += 1
315
 
316
  diff = time.perf_counter() - start
317
- msg = f"Generated {len(images)} image{'s' if len(images) > 1 else ''} in {diff:.2f}s"
318
  log.info(msg)
319
  if Info:
320
  Info(msg)
 
169
 
170
  start = time.perf_counter()
171
  log = Logger("generate")
172
+ log.info(f"Generating {num_images} image{'s' if num_images > 1 else ''}")
173
+
174
  loader = Loader()
175
  loader.load(
176
  KIND,
 
316
  CURRENT_IMAGE += 1
317
 
318
  diff = time.perf_counter() - start
319
+ msg = f"Generating {len(images)} image{'s' if len(images) > 1 else ''} done in {diff:.2f}s"
320
  log.info(msg)
321
  if Info:
322
  Info(msg)
lib/loader.py CHANGED
@@ -1,4 +1,5 @@
1
  import gc
 
2
  from threading import Lock
3
 
4
  import torch
@@ -145,6 +146,7 @@ class Loader:
145
  pipeline = Config.PIPELINES[kind]
146
  if self.pipe is None:
147
  try:
 
148
  self.log.info(f"Loading {model}")
149
  self.model = model
150
  if model.lower() in Config.MODEL_CHECKPOINTS.keys():
@@ -154,6 +156,8 @@ class Loader:
154
  ).to("cuda")
155
  else:
156
  self.pipe = pipeline.from_pretrained(model, **kwargs).to("cuda")
 
 
157
  except Exception as e:
158
  self.log.error(f"Error loading {model}: {e}")
159
  self.model = None
 
1
  import gc
2
+ import time
3
  from threading import Lock
4
 
5
  import torch
 
146
  pipeline = Config.PIPELINES[kind]
147
  if self.pipe is None:
148
  try:
149
+ start = time.perf_counter()
150
  self.log.info(f"Loading {model}")
151
  self.model = model
152
  if model.lower() in Config.MODEL_CHECKPOINTS.keys():
 
156
  ).to("cuda")
157
  else:
158
  self.pipe = pipeline.from_pretrained(model, **kwargs).to("cuda")
159
+ diff = time.perf_counter() - start
160
+ self.log.info(f"Loading {model} done in {diff:.2f}s")
161
  except Exception as e:
162
  self.log.error(f"Error loading {model}: {e}")
163
  self.model = None