import cv2
import gradio as gr
import mediapipe as mp

mp_drawing = mp.solutions.drawing_utils
mp_drawing_styles = mp.solutions.drawing_styles
mp_hands = mp.solutions.hands
def fun(img):
    # Gradio delivers each webcam frame as an RGB numpy array.
    with mp_hands.Hands(model_complexity=0,
                        min_detection_confidence=0.5,
                        min_tracking_confidence=0.5) as hands:
        # Mirror the frame and swap the channels to BGR for OpenCV-style drawing.
        image = cv2.flip(img[:, :, ::-1], 1)
        # MediaPipe expects RGB input, so convert back before processing.
        results = hands.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
        # Draw the detected hand landmarks and connections onto the frame.
        if results.multi_hand_landmarks:
            for hand_landmarks in results.multi_hand_landmarks:
                mp_drawing.draw_landmarks(
                    image,
                    hand_landmarks,
                    mp_hands.HAND_CONNECTIONS,
                    mp_drawing_styles.get_default_hand_landmarks_style(),
                    mp_drawing_styles.get_default_hand_connections_style())
        # Undo the mirror and return an RGB frame for Gradio to display.
        return cv2.flip(image[:, :, ::-1], 1)
with gr.Blocks(title="Realtime Keypoint Detection | Data Science Dojo",
               css="footer {display:none !important} .output-markdown{display:none !important}") as demo:
    with gr.Row():
        with gr.Column():
            webcam_input = gr.Webcam(streaming=True)
        with gr.Column():
            output = gr.Image()
    # Re-run fun() on every streamed webcam frame and show the annotated result.
    webcam_input.stream(fn=fun, inputs=webcam_input, outputs=output)

demo.launch(debug=True)
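
To sanity-check the processing function outside the Space, a minimal sketch along these lines can help, assuming fun() is defined as above and a local test image exists at the hypothetical path sample.jpg: read the frame with OpenCV, convert it to the RGB order that fun() expects (the same order Gradio's webcam component delivers), and write the annotated result back to disk.

import cv2

# Load a local test frame (hypothetical path) and convert BGR -> RGB,
# since fun() expects RGB frames, matching what Gradio's webcam provides.
frame_bgr = cv2.imread("sample.jpg")
frame_rgb = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2RGB)

# Run hand detection and landmark drawing on the single frame.
annotated_rgb = fun(frame_rgb)

# Convert back to BGR for cv2.imwrite and save the annotated frame.
cv2.imwrite("sample_annotated.jpg", cv2.cvtColor(annotated_rgb, cv2.COLOR_RGB2BGR))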