MohamedRashad committed
Commit 041b736 · 1 Parent(s): 77e7720

Remove redundant text encoder model download and update tokenizer path in app.py

Files changed (1)
  1. app.py +1 -8
app.py CHANGED
@@ -44,13 +44,6 @@ def download_weights(weights_path):
         if not vae_file.exists():
             hf_hub_download(repo_id="FoundationVision/Infinity", filename="infinity_vae_d32reg.pth", local_dir=str(weights_path))
 
-        # For the text encoder, we need to download the entire model
-        text_encoder_ckpt = weights_path / 'flan-t5-xl'
-        if not text_encoder_ckpt.exists():
-            tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
-            model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl")
-            tokenizer.save_pretrained(text_encoder_ckpt)
-            model.save_pretrained(text_encoder_ckpt)
     except Exception as e:
         print(f"Error downloading weights: {e}")
 
@@ -402,7 +395,7 @@ args = argparse.Namespace(
 )
 
 # Load models
-text_tokenizer, text_encoder = load_tokenizer(t5_path=str(weights_path / 'flan-t5-xl'))
+text_tokenizer, text_encoder = load_tokenizer(t5_path="google/flan-t5-xl")
 vae = load_visual_tokenizer(args)
 infinity = load_transformer(vae, args)
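The change works because `from_pretrained` in `transformers` accepts a Hub model id such as "google/flan-t5-xl" just as readily as a local directory and caches the files on first download, so pre-saving a local `flan-t5-xl` copy in `download_weights` was redundant. A minimal sketch of the idea, assuming `load_tokenizer` ultimately wraps the standard `transformers` loaders (the `load_t5` helper below is illustrative, not the Infinity code):

```python
from transformers import T5Tokenizer, T5EncoderModel

def load_t5(t5_path: str = "google/flan-t5-xl"):
    """Illustrative stand-in for load_tokenizer: accepts a local dir or a Hub id."""
    # from_pretrained downloads and caches the checkpoint the first time a Hub id
    # is passed, so no explicit save_pretrained step is needed beforehand.
    tokenizer = T5Tokenizer.from_pretrained(t5_path)
    text_encoder = T5EncoderModel.from_pretrained(t5_path).eval()
    return tokenizer, text_encoder

text_tokenizer, text_encoder = load_t5("google/flan-t5-xl")
```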