noamrot committed
Commit 1b97aa8
1 Parent(s): c38662a

add examples

Files changed (3)
  1. app.py +2 -10
  2. bike.jpg +0 -0
  3. surfer.jpg +0 -0
app.py CHANGED
@@ -1,11 +1,3 @@
- # import gradio as gr
-
- # def greet(image):
- #     return "Shape " + image.shape + "!!"
-
- # iface = gr.Interface(fn=greet, inputs="image", outputs="text")
- # iface.launch()
-
  import gradio as gr
  import sys
  from BLIP.models.blip import blip_decoder
@@ -42,7 +34,7 @@ inputs = [gr.Image(type='pil', interactive=False),]
  outputs = gr.outputs.Textbox(label="Caption")

  description = "Gradio demo for FuseCap: Leveraging Large Language Models to Fuse Visual Data into Enriched Image Captions. This demo features a BLIP-based model, trained using FuseCap."
-
+ examples = [["birthday_dog.jpeg"], ["surfer.png"], ["bike.jpg"]]
  article = "<p style='text-align: center'><a href='google.com' target='_blank'>place holder</a>/p>"


@@ -52,6 +44,6 @@ iface = gr.Interface(fn=inference,
  title="FuseCap",
  description=description,
  article=article,
- examples=[['birthday_dog.jpeg']],
+ examples=examples,
  enable_queue=True)
  iface.launch()
bike.jpg ADDED
surfer.jpg ADDED
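
For context, the diff only touches the example wiring; the rest of app.py is not shown. Below is a minimal sketch of what the full file plausibly looks like after this commit. Everything outside the lines visible in the diff (the checkpoint path 'model.pth', the preprocessing transform, and the body of inference) is an assumption reconstructed from typical BLIP demo code, not code confirmed to be in this Space. Note that Gradio's examples parameter expects one inner list per input component, which is why each example is a single-element list holding an image path.

# Minimal sketch of app.py after this commit. Lines marked "assumed" do not
# appear in the diff and are reconstructed from typical BLIP demo code.
import gradio as gr
import torch
from torchvision import transforms
from BLIP.models.blip import blip_decoder

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
image_size = 384  # assumed: standard BLIP input resolution

# Assumed: standard BLIP preprocessing (CLIP normalization constants).
transform = transforms.Compose([
    transforms.Resize((image_size, image_size)),
    transforms.ToTensor(),
    transforms.Normalize((0.48145466, 0.4578275, 0.40821073),
                         (0.26862954, 0.26130258, 0.27577711)),
])

# Assumed: 'model.pth' is a hypothetical path to the FuseCap-trained BLIP
# checkpoint; the real Space loads its own weights.
model = blip_decoder(pretrained='model.pth', image_size=image_size, vit='base')
model.eval()
model = model.to(device)

def inference(raw_image):
    # Assumed body: caption a single PIL image with beam search.
    image = transform(raw_image).unsqueeze(0).to(device)
    with torch.no_grad():
        caption = model.generate(image, sample=False, num_beams=3,
                                 max_length=40, min_length=5)
    return caption[0]

inputs = [gr.Image(type='pil', interactive=False),]
outputs = gr.outputs.Textbox(label="Caption")

description = "Gradio demo for FuseCap: Leveraging Large Language Models to Fuse Visual Data into Enriched Image Captions. This demo features a BLIP-based model, trained using FuseCap."
# Added by this commit: one inner list per input component.
examples = [["birthday_dog.jpeg"], ["surfer.png"], ["bike.jpg"]]
# As committed; '/p>' is presumably meant to be '</p>'.
article = "<p style='text-align: center'><a href='google.com' target='_blank'>place holder</a>/p>"

iface = gr.Interface(fn=inference,
                     inputs=inputs,
                     outputs=outputs,
                     title="FuseCap",
                     description=description,
                     article=article,
                     examples=examples,
                     enable_queue=True)
iface.launch()

One thing worth flagging: the new examples list references surfer.png, but the file added by this commit is surfer.jpg, so that example will not resolve unless a surfer.png also exists in the Space.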