From bbc45ab40710c18b8cb516b0632aee31e9c02624 Mon Sep 17 00:00:00 2001
From: Mudassir Junejo
Date: Sun, 19 Oct 2025 21:11:22 +0500
Subject: [PATCH] update

---
 Gesture Volume Control/README.md         | 11 +++++
 Gesture Volume Control/volume_control.py | 62 ++++++++++++++++++++++++
 2 files changed, 73 insertions(+)
 create mode 100644 Gesture Volume Control/README.md
 create mode 100644 Gesture Volume Control/volume_control.py

diff --git a/Gesture Volume Control/README.md b/Gesture Volume Control/README.md
new file mode 100644
index 0000000..c83274a
--- /dev/null
+++ b/Gesture Volume Control/README.md
@@ -0,0 +1,11 @@
+# 🎚️ Gesture Volume Control
+
+Control your system volume with hand gestures using your webcam!
+This project uses **OpenCV**, **MediaPipe**, and **pycaw** to detect your hand in real time and map the distance between your thumb and index finger to the system volume. Because **pycaw** drives the Windows Core Audio API, the script is Windows-only.
+
+## ⚙️ Requirements
+```bash
+pip install opencv-python mediapipe pycaw comtypes
+```
+
+Check out my GitHub: [10mudassir007](https://github.com/10mudassir007)
\ No newline at end of file
diff --git a/Gesture Volume Control/volume_control.py b/Gesture Volume Control/volume_control.py
new file mode 100644
index 0000000..e4cd91d
--- /dev/null
+++ b/Gesture Volume Control/volume_control.py
@@ -0,0 +1,62 @@
+import cv2
+import mediapipe as mp
+from ctypes import cast, POINTER
+from comtypes import CLSCTX_ALL
+from pycaw.pycaw import AudioUtilities, IAudioEndpointVolume
+import math
+
+# MediaPipe hand detector (track at most one hand)
+mp_hands = mp.solutions.hands
+hands = mp_hands.Hands(max_num_hands=1)
+mp_draw = mp.solutions.drawing_utils
+
+# Windows Core Audio endpoint via pycaw
+devices = AudioUtilities.GetSpeakers()
+interface = devices.Activate(IAudioEndpointVolume._iid_, CLSCTX_ALL, None)
+volume = cast(interface, POINTER(IAudioEndpointVolume))
+vol_range = volume.GetVolumeRange()  # (min dB, max dB, increment)
+
+# Webcam capture (device index 0)
+cap = cv2.VideoCapture(0)
+
+while True:
+    ret, frame = cap.read()
+    if not ret:
+        break
+    frame = cv2.convertScaleAbs(frame, alpha=1.2, beta=30)  # brighten slightly
+    frame = cv2.flip(frame, 1)  # mirror for a natural selfie view
+    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
+    results = hands.process(rgb)
+
+    # Resize the display frame to 640x480 (landmarks are normalized, so this is safe)
+    frame = cv2.resize(frame, (640, 480))
+
+    if results.multi_hand_landmarks:
+        for handLms in results.multi_hand_landmarks:
+            mp_draw.draw_landmarks(frame, handLms, mp_hands.HAND_CONNECTIONS)
+
+            # Thumb and index finger tips
+            thumb_tip = handLms.landmark[mp_hands.HandLandmark.THUMB_TIP]
+            index_tip = handLms.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP]
+
+            h, w, _ = frame.shape
+            x1, y1 = int(thumb_tip.x * w), int(thumb_tip.y * h)
+            x2, y2 = int(index_tip.x * w), int(index_tip.y * h)
+
+            cv2.circle(frame, (x1, y1), 8, (255, 0, 0), cv2.FILLED)
+            cv2.circle(frame, (x2, y2), 8, (255, 0, 0), cv2.FILLED)
+            cv2.line(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)
+
+            # Distance between fingertips → volume (clamped to the device range)
+            length = math.hypot(x2 - x1, y2 - y1)
+            min_vol, max_vol, _ = vol_range
+            vol = (length / 200) * (max_vol - min_vol) + min_vol  # ~200 px spans the full range
+            vol = max(min(vol, max_vol), min_vol)
+            volume.SetMasterVolumeLevel(vol, None)
+
+    cv2.imshow("Hand Volume Control", frame)
+    if cv2.waitKey(1) & 0xFF == ord("q"):
+        break
+
+cap.release()
+cv2.destroyAllWindows()
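
For reference, a minimal standalone sketch of the pixel-distance-to-volume mapping the patch applies, factored into a pure function so it can be tried without a webcam or pycaw. The 200 px span and the clamping come from the script above; the dB endpoints in the example calls are illustrative values of the kind `GetVolumeRange()` typically reports, not guaranteed by pycaw.

```python
# Hedged sketch: the linear pixel-distance → dB mapping used in volume_control.py.
def distance_to_volume(length_px, min_vol_db, max_vol_db, full_range_px=200):
    """Map a thumb–index distance in pixels to a master-volume level in dB."""
    vol = (length_px / full_range_px) * (max_vol_db - min_vol_db) + min_vol_db
    return max(min(vol, max_vol_db), min_vol_db)  # clamp to the device range

# Illustrative endpoints only; real values come from volume.GetVolumeRange().
print(distance_to_volume(0, -65.25, 0.0))    # -65.25 dB (quiet end)
print(distance_to_volume(100, -65.25, 0.0))  # about halfway through the dB range
print(distance_to_volume(300, -65.25, 0.0))  # 0.0 dB (clamped to the maximum)
```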