Track fist + left click
This commit is contained in:
parent
b10a6717fe
commit
b8809db927
4
.pylintrc
Normal file
4
.pylintrc
Normal file
@ -0,0 +1,4 @@
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-whitelist=cv2
|
27
README.md
27
README.md
@ -1,2 +1,29 @@
|
||||
# ZPPR
|
||||
|
||||
The application allows its users to control their mouse cursor using their hands, without a physical mouse.
|
||||
|
||||
The Haar cascades are not my own work. Their owner is GitHub user [trane293](https://github.com/trane293).
|
||||
[Here](https://github.com/trane293/Palm-Fist-Gesture-Recognition/)'s the link to his project I used.
|
||||
|
||||
To run the project you have to install the following python3 libraries:
|
||||
- opencv-python
|
||||
- numpy
|
||||
- pynput
|
||||
|
||||
You can do it using `pip3`
|
||||
e.g. `pip3 install opencv-python`
|
||||
|
||||
To run the application simply run
|
||||
|
||||
`python3 main.py`
|
||||
|
||||
from the app directory.
|
||||
|
||||
Tutorials and other materials used during development:
|
||||
|
||||
- https://techtutorialsx.com/2019/04/13/python-opencv-getting-video-from-camera/
|
||||
- https://techtutorialsx.com/2019/04/13/python-opencv-getting-video-from-camera/
|
||||
- https://docs.opencv.org/3.4/db/d28/tutorial_cascade_classifier.html
|
||||
- https://docs.opencv.org/2.4/modules/objdetect/doc/cascade_classification.html
|
||||
- https://www.youtube.com/watch?v=W7Bs3uuMUSQ&t=6s
|
||||
- https://www.youtube.com/watch?v=88HdqNDQsEk
|
||||
|
66
main.py
Normal file
66
main.py
Normal file
@ -0,0 +1,66 @@
|
||||
import cv2
|
||||
import numpy
|
||||
import pynput.mouse
|
||||
|
||||
def openCamera(image):
    """Show *image* in the "frame" window and poll the keyboard once.

    Parameters:
        image: a BGR frame (numpy array) to display via cv2.imshow.

    Returns:
        The key code reported by ``cv2.waitKey(1)`` (-1 when no key was
        pressed during the 1 ms wait).

    Note: the parameter was renamed from ``input`` to avoid shadowing the
    Python builtin; the script's only call site passes it positionally.
    """
    cv2.imshow("frame", image)
    key = cv2.waitKey(1)
    return key
|
||||
|
||||
def drawBox(finder, frame, b, g, r):
    """Outline every detection in *finder* on *frame*.

    *finder* is an iterable of ``(x, y, w, h)`` bounding boxes (the shape
    returned by ``CascadeClassifier.detectMultiScale``); ``b``/``g``/``r``
    give the rectangle colour, drawn 2 px thick.
    """
    for (left, top, width, height) in finder:
        top_left = (left, top)
        bottom_right = (left + width, top + height)
        cv2.rectangle(frame, top_left, bottom_right, (b, g, r), 2)
|
||||
|
||||
def exit(video):
    """Release the capture device *video* and close all OpenCV windows.

    NOTE(review): this function shadows the ``exit`` builtin; consider
    renaming it (e.g. ``cleanup``) together with its call site.
    """
    video.release()
    cv2.destroyAllWindows()
|
||||
|
||||
|
||||
# --- Gesture-controlled mouse: an open palm left-clicks, a moving fist
# --- drags the cursor by its frame-to-frame displacement.
mouse = pynput.mouse.Controller()
video = cv2.VideoCapture(0)

# Load the Haar cascades once. They were previously re-read from disk on
# every loop iteration, which is pure overhead.
cascadeHand = cv2.CascadeClassifier("hand.xml")
cascadeFist = cv2.CascadeClassifier("fist.xml")

findFistInit = None  # fist detections from the previous frame (None until first frame)

while True:
    frame = video.read()[1]
    # Grayscale + Gaussian blur to suppress noise before cascade detection.
    fmask = cv2.GaussianBlur(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY), (21, 21), 0)
    cv2.imshow("fmask", fmask)

    findFist = cascadeFist.detectMultiScale(fmask, scaleFactor=1.1, minNeighbors=75)
    # BUG FIX: findHand was previously only assigned inside the "no fist"
    # branch, so drawBox(findHand, ...) raised NameError whenever a fist
    # was detected. Default to "no detections" each frame.
    findHand = ()

    if findFistInit is None:
        findFistInit = findFist

    # len() is used instead of comparing with () because detectMultiScale
    # returns an empty tuple on no match but a numpy array on matches, and
    # array == () / != () comparisons are unreliable.
    if len(findFist) == 0:
        # No fist visible: look for an open palm and left-click on it.
        findHand = cascadeHand.detectMultiScale(fmask, scaleFactor=1.1, minNeighbors=10)
        if len(findHand) != 0:
            mouse.click(pynput.mouse.Button.left, 1)

    if len(findFist) != 0 and len(findFistInit) != 0:
        # Cursor moves by the first fist's displacement since the last frame.
        dx = findFist[0][0] - findFistInit[0][0]
        dy = findFist[0][1] - findFistInit[0][1]
        # x is negated because the preview is mirrored (cv2.flip below);
        # 7 is an empirical sensitivity factor.
        mouse.move(-7 * dx, 7 * dy)
        findFistInit = findFist

    drawBox(findFist, frame, 0, 0, 255)  # fists outlined in red
    drawBox(findHand, frame, 0, 255, 0)  # palms outlined in green

    frame = cv2.flip(frame, 1)  # mirror for a natural self-view
    key = openCamera(frame)
    if key == ord('q'):
        break

exit(video)
|
Loading…
Reference in New Issue
Block a user