AdrienB134 committed 57c4bbb (1 parent: e3aae8f): jhgk
app.py CHANGED

@@ -32,11 +32,13 @@ def model_inference(
     images, text,
 ):
 
-    print(type(images))
-    print(images[0])
-    images = Image.open(images[0][0])
+    # print(type(images))
+    # print(images[0])
+    # images = Image.open(images[0][0])
+    # print(images)
+    # print(type(images))
+    images = [{"type": "image", "image": Image.open(image[0])} for image in images]
     print(images)
-    print(type(images))
     # model = Qwen2VLForConditionalGeneration.from_pretrained(
     #     "Qwen/Qwen2-VL-7B-Instruct", torch_dtype="auto", device_map="auto"
     # )
@@ -55,13 +57,7 @@ def model_inference(
     messages = [
         {
             "role": "user",
-            "content":
-                {
-                    "type": "image",
-                    "image": images,
-                },
-                {"type": "text", "text": text},
-            ],
+            "content": images.append({"type": "text", "text": text}),
         }
     ]
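
One detail worth flagging in the new hunk: `list.append` returns `None`, so `"content": images.append({"type": "text", "text": text})` stores `None` as the message content rather than the image-plus-text list. Below is a minimal sketch of the intended structure, assuming `images` arrives as a list of `(path, caption)` pairs such as a Gradio Gallery provides; that assumption is only inferred from the `image[0]` indexing above, not stated by the commit.

```python
# Minimal sketch, not the committed code: build the content list first,
# mutate it in place, then pass the list itself as "content".
# Assumes `images` is a list of (path, caption) pairs (hypothetical input
# shape, inferred from the image[0] indexing in the diff).
from PIL import Image

def build_messages(images, text):
    content = [{"type": "image", "image": Image.open(image[0])} for image in images]
    content.append({"type": "text", "text": text})  # append returns None; use `content` afterwards
    return [{"role": "user", "content": content}]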
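The commented-out `Qwen2VLForConditionalGeneration.from_pretrained(...)` lines suggest the rest of the function runs a standard Qwen2-VL generation pass, but the diff does not show that part. The following is therefore only a sketch of the usual transformers + qwen_vl_utils flow, assuming the `messages` built above (PIL images plus a text entry) are passed straight to the processor; it is not the Space's actual code.

```python
# Sketch of the stock Qwen2-VL inference flow hinted at by the commented-out loader.
from transformers import Qwen2VLForConditionalGeneration, AutoProcessor
from qwen_vl_utils import process_vision_info

model = Qwen2VLForConditionalGeneration.from_pretrained(
    "Qwen/Qwen2-VL-7B-Instruct", torch_dtype="auto", device_map="auto"
)
processor = AutoProcessor.from_pretrained("Qwen/Qwen2-VL-7B-Instruct")

def generate(messages, max_new_tokens=256):
    # Render the chat template and pull the image inputs back out of `messages`.
    prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    image_inputs, video_inputs = process_vision_info(messages)
    inputs = processor(
        text=[prompt],
        images=image_inputs,
        videos=video_inputs,
        padding=True,
        return_tensors="pt",
    ).to(model.device)
    output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Drop the prompt tokens before decoding the generated answer.
    trimmed = [out[inp.shape[0]:] for inp, out in zip(inputs.input_ids, output_ids)]
    return processor.batch_decode(trimmed, skip_special_tokens=True)[0]
```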