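"""Hand-gesture direction recognition with MediaPipe Hands.

Reads webcam frames, locates hand landmarks, classifies the index-finger
direction relative to the wrist, and prints the mapped action at most once
per second.
"""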
import cv2
import mediapipe as mp
import time

# Initialize the MediaPipe Hands module; the gesture logic below assumes a
# single hand, so cap detection at one
mp_hands = mp.solutions.hands
hands = mp_hands.Hands(max_num_hands=1, min_detection_confidence=0.7)
mp_drawing = mp.solutions.drawing_utils

# Gesture mappings
GESTURES = {
    "forward": "Move Forward",
    "backward": "Move Backward",
    "left": "Move Left",
    "right": "Move Right",
    "up": "Jump"
}

# Classify the pointing direction from the hand landmarks
def detect_direction(landmarks):
    """Return a GESTURES key based on where the index fingertip sits
    relative to the wrist (MediaPipe normalized coordinates, where x grows
    rightward and y grows downward)."""
    wrist = landmarks[0]               # Landmark 0: wrist
    index_finger_tip = landmarks[8]    # Landmark 8: index finger tip

    # Offset of the index fingertip from the wrist
    x_diff = index_finger_tip.x - wrist.x
    y_diff = index_finger_tip.y - wrist.y

    # Compare against a threshold along the dominant axis
    if abs(x_diff) > abs(y_diff):  # Horizontal offset dominates
        if x_diff > 0.1:           # Fingertip is right of the wrist
            return "right"
        elif x_diff < -0.1:        # Fingertip is left of the wrist
            return "left"
    else:                          # Vertical offset dominates
        if y_diff > 0.1:           # Fingertip is below the wrist
            return "backward"
        elif y_diff < -0.1:        # Fingertip is above the wrist
            return "up"

    # No offset exceeds the threshold: assume the hand points at the camera
    return "forward"

# Video capture for hand gesture recognition
cap = cv2.VideoCapture(0)
prev_time = 0            # Timestamp of the last emitted action
delay_interval = 1.0     # Minimum delay (seconds) between actions

while True:
    ret, frame = cap.read()
    if not ret:
        break

    # Flip the frame horizontally so movement mirrors the user
    frame = cv2.flip(frame, 1)
    frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

    # Detect hands
    results = hands.process(frame_rgb)

    landmark_list = []

    # If hand landmarks are detected
    if results.multi_hand_landmarks:
        for hand_landmarks in results.multi_hand_landmarks:
            mp_drawing.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)

            # Collect hand landmark data
            for lm in hand_landmarks.landmark:
                landmark_list.append(lm)

    # Classify the direction only when a hand was actually found;
    # calling detect_direction on an empty list would raise an IndexError
    if landmark_list:
        direction = detect_direction(landmark_list)

        # Rate-limit the output: emit at most one action per delay_interval
        current_time = time.time()
        if current_time - prev_time > delay_interval:
            print(GESTURES[direction])  # Output the corresponding action
            prev_time = current_time

    # Display the frame with landmarks
    cv2.imshow('Hand Gesture Recognition', frame)

    # Quit if 'q' is pressed
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

hands.close()
cap.release()
cv2.destroyAllWindows()
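
# A possible next step (a sketch, not part of this script): swap the print
# for real key presses via the third-party pyautogui package, so the gestures
# drive a game directly. The key bindings below are illustrative assumptions.
#
#   import pyautogui
#   KEYS = {"forward": "w", "backward": "s", "left": "a",
#           "right": "d", "up": "space"}
#   pyautogui.press(KEYS[direction])   # instead of print(GESTURES[direction])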