tonyassi committed on
Commit
9d4c268
β€’
1 Parent(s): a29c427

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -6,16 +6,16 @@ from transformers import BlipProcessor, BlipForConditionalGeneration
6
# Load the BLIP-large captioning processor (handles image preprocessing and
# token decoding). Downloads weights from the Hugging Face Hub on first run.
processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
7
# Load the matching BLIP conditional-generation model used to produce captions.
model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-large")
8
 
9
def caption(img, min_, max_):
    """Produce a text caption for the image at *img*.

    min_ / max_ bound the generated caption length in tokens.
    Relies on the module-level BLIP `processor` and `model`.
    """
    # Force RGB so grayscale / RGBA uploads don't break the processor.
    picture = Image.open(img).convert('RGB')
    encoded = processor(picture, return_tensors="pt")
    token_ids = model.generate(**encoded, min_length=min_, max_length=max_)
    # Decode only the first (and only) sequence, dropping BOS/EOS markers.
    return processor.decode(token_ids[0], skip_special_tokens=True)
16
 
17
def greet(img, min_len=5, max_len=50):
    """Gradio handler: caption the uploaded image.

    Bug fix: `caption()` takes three positional arguments, but the original
    body called `caption(img)` with only one, so every invocation raised
    TypeError. The length bounds are now accepted and forwarded; defaults
    keep the original one-argument call signature working.
    (Default values 5/50 are reasonable BLIP bounds — confirm against the
    Interface inputs, which are outside this view.)
    """
    return caption(img, min_len, max_len)
19
 
20
  iface = gr.Interface(fn=greet,
21
  title='Blip Image Captioning Large',
 
6
# Load the BLIP-large captioning processor (image preprocessing + decoding).
processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
7
# Load the matching BLIP conditional-generation model used to produce captions.
model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-large")
8
 
9
def caption(img, min_len, max_len):
    """Caption the image at *img* with BLIP.

    min_len / max_len bound the generated caption length in tokens.
    Uses the module-level `processor` and `model`.
    """
    # Normalize to RGB so any upload mode (grayscale, RGBA) is accepted.
    rgb_image = Image.open(img).convert('RGB')
    model_inputs = processor(rgb_image, return_tensors="pt")
    generated = model.generate(**model_inputs, min_length=min_len, max_length=max_len)
    # Single-image batch: decode sequence 0 without special tokens.
    return processor.decode(generated[0], skip_special_tokens=True)
16
 
17
def greet(img, min_len, max_len):
    """Gradio entry point: delegate to caption() with the given length bounds."""
    result = caption(img, min_len, max_len)
    return result
19
 
20
  iface = gr.Interface(fn=greet,
21
  title='Blip Image Captioning Large',