backup
Browse files
app.py
CHANGED
@@ -28,8 +28,8 @@ check_environment()
28      login(token=os.environ["HF_TOKEN"], add_to_git_credential=True)
29
30      # Load model and processor (do this outside the inference function to avoid reloading)
31  -   base_model_path = "
32  -   lora_weights_path = "taesiri/BugsBunny-LLama-3.2-11B-Vision-Base-Medium-LoRA"
33
34      processor = AutoProcessor.from_pretrained(base_model_path)
35      model = MllamaForConditionalGeneration.from_pretrained(
@@ -37,9 +37,10 @@ model = MllamaForConditionalGeneration.from_pretrained(
37          torch_dtype=torch.bfloat16,
38          device_map="cuda",
39      )
40  -   model = PeftModel.from_pretrained(model, lora_weights_path)
41      model.tie_weights()
42
43
44      def describe_image_in_JSON(json_string):
45          try:
28      login(token=os.environ["HF_TOKEN"], add_to_git_credential=True)
29
30      # Load model and processor (do this outside the inference function to avoid reloading)
31  +   base_model_path = "taesiri/BugsBunny-LLama-3.2-11B-Vision-Instruct-Medium-FullModel"
32  +   # lora_weights_path = "taesiri/BugsBunny-LLama-3.2-11B-Vision-Base-Medium-LoRA"
33
34      processor = AutoProcessor.from_pretrained(base_model_path)
35      model = MllamaForConditionalGeneration.from_pretrained(
37          torch_dtype=torch.bfloat16,
38          device_map="cuda",
39      )
40      model.tie_weights()
41
42  +   # model = PeftModel.from_pretrained(model, lora_weights_path)
43  +
44
45      def describe_image_in_JSON(json_string):
46          try: