switching to pretrain_flant5xl
app.py CHANGED
@@ -9,7 +9,7 @@ from PIL import Image
 device = torch.device("cuda") if torch.cuda.is_available() else "cpu"
 
 model, vis_processors, _ = load_model_and_preprocess(
-    name="
+    name="blip2_t5", model_type="pretrain_flant5xl", is_eval=True, device=device
 )
 
 
@@ -62,7 +62,7 @@ with gr.Blocks() as demo:
         "### BLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models"
     )
     gr.Markdown(
-        "This demo uses `
+        "This demo uses the `pretrain_flant5xl` weights. For more information please visit [Github](https://github.com/salesforce/LAVIS/tree/main/projects/blip2) or [Paper](https://arxiv.org/abs/2301.12597)."
     )
 
     with gr.Row():
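For context, the sketch below shows roughly how a model loaded this way is typically used with the LAVIS API to caption an image. It is a minimal, assumed example, not the Space's actual app.py: the generate_caption helper, its prompt handling, and the file name in the usage comment are illustrative.

# Minimal sketch (assumed, not the Space's actual code): how the blip2_t5 /
# pretrain_flant5xl model and vis_processors loaded above are typically used.
import torch
from PIL import Image
from lavis.models import load_model_and_preprocess

device = torch.device("cuda") if torch.cuda.is_available() else "cpu"

model, vis_processors, _ = load_model_and_preprocess(
    name="blip2_t5", model_type="pretrain_flant5xl", is_eval=True, device=device
)

def generate_caption(raw_image, prompt=None):
    # The "eval" visual processor resizes and normalizes the PIL image into
    # the tensor format BLIP-2 expects; unsqueeze adds a batch dimension.
    image = vis_processors["eval"](raw_image).unsqueeze(0).to(device)
    samples = {"image": image}
    if prompt:
        # An optional text prompt enables prompted captioning / VQA-style queries.
        samples["prompt"] = prompt
    # model.generate returns a list of generated strings, one per batch item.
    return model.generate(samples)[0]

# Example usage (hypothetical image file):
# print(generate_caption(Image.open("demo.jpg").convert("RGB")))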