taesiri committed on
Commit a822ce3
1 Parent(s): 02a6b14
Files changed (1)
  1. app.py +5 -3
app.py CHANGED
@@ -59,7 +59,7 @@ def clear_chat(history):
 
 with gr.Blocks() as demo:
     gr.Markdown(
-        "## BLIP-2 - Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models"
+        "### BLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models"
     )
     gr.Markdown(
         "This demo uses `OPT2.7B` weights. For more information please see [Github](https://github.com/salesforce/LAVIS/tree/main/projects/blip2) or [Paper](https://arxiv.org/abs/2301.12597)."
@@ -107,13 +107,15 @@ with gr.Blocks() as demo:
             ["./5kstbz-0001.png", "Beam Search", "where is the man standing?"],
             [
                 "ILSVRC2012_val_00000008.JPEG",
-                "eam Search",
+                "Beam Search",
                 "Name the colors of macarons you see in the image.",
             ],
         ],
         inputs=[input_image, caption_type, question_txt],
     )
 
-    gr.Markdown("Sample images are taken from ImageNet, CUB and GamePhysics datasets.")
+    gr.Markdown(
+        "Sample images are taken from [ImageNet](https://paperswithcode.com/sota/image-classification-on-imagenet), [CUB](https://paperswithcode.com/dataset/cub-200-2011) and [GamePhysics](https://asgaardlab.github.io/CLIPxGamePhysics/) datasets."
+    )
 
 demo.launch()