manu committed
Commit 6851b70 • 1 Parent(s): 83816c5

Update app.py

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -55,9 +55,13 @@ def index(files, ds):
         collate_fn=lambda x: process_images(processor, x),
     )
 
+    print(f"model device: {model.device}")
+
     for batch_doc in tqdm(dataloader):
         with torch.no_grad():
             batch_doc = {k: v.to(model.device) for k, v in batch_doc.items()}
+            print(f"model device: {model.device}")
+            print(f"batch input_ids: {batch_doc.input_ids}")
             embeddings_doc = model(**batch_doc)
         ds.extend(list(torch.unbind(embeddings_doc.to("cpu"))))
     return f"Uploaded and converted {len(images)} pages", ds, images
@@ -71,8 +75,6 @@ model = ColPali.from_pretrained(
 model.load_adapter(model_name)
 processor = AutoProcessor.from_pretrained(model_name, token = token)
 
-device = model.device
-
 mock_image = Image.new("RGB", (448, 448), (255, 255, 255))
 
 with gr.Blocks(theme=gr.themes.Soft()) as demo: