AdrienB134 committed on
Commit
aeb1e2d
1 Parent(s): d7c0e17

Update app.py

Files changed (1)
  1. app.py +9 -9
app.py CHANGED
@@ -33,17 +33,17 @@ id_model = Idefics3ForConditionalGeneration.from_pretrained("HuggingFaceM4/Idefi
 BAD_WORDS_IDS = id_processor.tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
 EOS_WORDS_IDS = [id_processor.tokenizer.eos_token_id]
 
-# # Load colpali model
-# model_name = "vidore/colpali-v1.2"
-# token = os.environ.get("HF_TOKEN")
-# model = ColPali.from_pretrained(
-#     "vidore/colpaligemma-3b-pt-448-base", torch_dtype=torch.bfloat16, device_map="cuda", token = token).eval()
+# Load colpali model
+model_name = "vidore/colpali-v1.2"
+token = os.environ.get("HF_TOKEN")
+model = ColPali.from_pretrained(
+    "vidore/colpaligemma-3b-pt-448-base", torch_dtype=torch.bfloat16, device_map="cuda", token = token).eval()
 
-# model.load_adapter(model_name)
-# model = model.eval()
-# processor = AutoProcessor.from_pretrained(model_name, token = token)
+model.load_adapter(model_name)
+model = model.eval()
+processor = AutoProcessor.from_pretrained(model_name, token = token)
 
-# mock_image = Image.new("RGB", (448, 448), (255, 255, 255))
+mock_image = Image.new("RGB", (448, 448), (255, 255, 255))
 
 @spaces.GPU
 def model_inference(
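For readers skimming the diff: the block this commit uncomments relies on imports defined earlier in app.py, which this hunk does not show. Below is a minimal self-contained sketch of the same loading step; the import paths are assumptions (colpali_engine's module layout differs between releases), and the comments describe the intent of each line rather than the author's exact setup.

# Sketch of the module-level loading re-enabled by this commit.
# Assumed imports: the real ones live elsewhere in app.py.
import os

import torch
from PIL import Image
from transformers import AutoProcessor
from colpali_engine.models import ColPali  # assumed import path

model_name = "vidore/colpali-v1.2"
token = os.environ.get("HF_TOKEN")  # gated checkpoints need an HF access token

# Load the PaliGemma base checkpoint in bfloat16 on the GPU,
# then attach the colpali-v1.2 weights as an adapter on top.
model = ColPali.from_pretrained(
    "vidore/colpaligemma-3b-pt-448-base",
    torch_dtype=torch.bfloat16,
    device_map="cuda",
    token=token,
).eval()
model.load_adapter(model_name)
processor = AutoProcessor.from_pretrained(model_name, token=token)

# Blank 448x448 image; the query-embedding path can use it as a placeholder visual input.
mock_image = Image.new("RGB", (448, 448), (255, 255, 255))

This mirrors the two-step pattern in the commit: from_pretrained on the base model followed by load_adapter for the retrieval head, with the processor taken from the adapter repository.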