linhcuem committed
Commit 6f5ac87
1 Parent(s): 6d4dc2e

Create app.py

Files changed (1)
  1. app.py +63 -0
app.py ADDED
@@ -0,0 +1,63 @@
import gradio as gr
import cv2
import requests
import os

from ultralyticsplus import YOLO, render_result

# Example rows for the Gradio interface: [image, model, image size, conf threshold, IoU threshold]
image_path = [
    ['test_images/2a998cfb0901db5f8210.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/2ce19ce0191acb44920b.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/2daab6ea3310e14eb801.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/4a137deefb14294a7005 (1).jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/7e77c596436c9132c87d.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/170f914014bac6e49fab.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/3355ec3269c8bb96e2d9.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/546306a88052520c0b43.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/33148464019ed3c08a8f.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/a17a992a1cd0ce8e97c1.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/b5db5e42d8b80ae653a9 (1).jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/b8ee1f5299a84bf612b9.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/b272fec7783daa63f32c.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/bb202b3eaec47c9a25d5.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/bf1e22b0a44a76142f5b.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/ea5473c5f53f27617e2e.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/ee106392e56837366e79.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
    ['test_images/f88d2214a4ee76b02fff.jpg', 'linhcuem/chamdiem_yolov8_ver10', 640, 0.25, 0.45],
]

# Load YOLO model
model = YOLO('linhcuem/chamdiem_yolov8_ver10')

###################################################
def yolov8_img_inference(
    image: gr.inputs.Image = None,
    model_path: gr.inputs.Dropdown = None,
    image_size: gr.inputs.Slider = 640,
    conf_threshold: gr.inputs.Slider = 0.25,
    iou_threshold: gr.inputs.Slider = 0.45,
):
    """Run YOLOv8 inference on one image and return the rendered detections."""
    model = YOLO(model_path)
    model.overrides['conf'] = conf_threshold
    model.overrides['iou'] = iou_threshold
    model.overrides['agnostic_nms'] = False
    model.overrides['max_det'] = 1000
    # Predict at the selected image size and draw the boxes on the input image.
    results = model.predict(image, imgsz=image_size)
    render = render_result(model=model, image=image, result=results[0])

    return render

inputs_images = [
    gr.inputs.Image(type="filepath", label="Input Image"),
    gr.inputs.Dropdown(["linhcuem/chamdiem_yolov8_ver10"],
                       default="linhcuem/chamdiem_yolov8_ver10", label="Model"),
    gr.inputs.Slider(minimum=320, maximum=1280, default=640, step=32, label="Image Size"),
    gr.inputs.Slider(minimum=0.0, maximum=1.0, default=0.25, step=0.05, label="Confidence Threshold"),
    gr.inputs.Slider(minimum=0.0, maximum=1.0, default=0.45, step=0.05, label="IOU Threshold"),
]

# Title and description shown above the demo (placeholder wording)
model_heading = "YOLOv8 demo: linhcuem/chamdiem_yolov8_ver10"
description = "Upload an image (or pick an example) to run YOLOv8 detection."

output_image = gr.outputs.Image(type="filepath", label="Output Image")
interface_image = gr.Interface(
    fn=yolov8_img_inference,
    inputs=inputs_images,
    outputs=output_image,
    title=model_heading,
    description=description,
    examples=image_path,
    cache_examples=False,
    theme='huggingface'
)

gr.TabbedInterface(
    [interface_image],
    tab_names=['Image inference']
).queue().launch()
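
For a quick check outside the Gradio UI, the same predict-and-render path can be exercised directly. This is a minimal sketch, assuming ultralyticsplus is installed, the repo's test_images/ folder is available locally, and that render_result returns a PIL image; the output filename prediction.jpg is arbitrary.

# Standalone sanity check, independent of the Gradio interface.
from ultralyticsplus import YOLO, render_result

model = YOLO('linhcuem/chamdiem_yolov8_ver10')
model.overrides['conf'] = 0.25   # confidence threshold
model.overrides['iou'] = 0.45    # NMS IoU threshold

results = model.predict('test_images/2a998cfb0901db5f8210.jpg', imgsz=640)
render = render_result(model=model,
                       image='test_images/2a998cfb0901db5f8210.jpg',
                       result=results[0])
render.save('prediction.jpg')  # assumes render_result returns a PIL image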