@inproceedings{scholars18931,
  author    = {Iskandar, M. and Bingi, K. and Ibrahim, R. and Omar, M. and Arun Mozhi Devan, P.},
  title     = {Hybrid Face and Eye Gesture Tracking Algorithm for Tello EDU RoboMaster TT Quadrotor Drone},
  booktitle = {2023 Innovations in Power and Advanced Computing Technologies, i-PACT 2023},
  year      = {2023},
  doi       = {10.1109/I-PACT58649.2023.10434449},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85186994921&doi=10.1109\%2fI-PACT58649.2023.10434449&partnerID=40&md5=6b90537ded8e659d645732ca3a163857},
  note      = {Cited by 0; Conference of 2023 Innovations in Power and Advanced Computing Technologies, i-PACT 2023; Conference Date: 8 December 2023 through 10 December 2023; Conference Code: 197662},
  keywords  = {Aircraft detection; Drones; Eye tracking; Face recognition; Gesture recognition; High level languages; Learning systems, Drone control; Eye-gestures; Gesture tracking; Gestures recognition; Hybrid gesture; Quad rotors; Recognition algorithm; Tello EDU; Tracking; Tracking algorithm, Python},
  abstract  = {Controlling a drone requires accuracy and efficiency, especially in gesture recognition: gestures must be mapped correctly, and the recognition algorithms must be safe and computationally efficient. To achieve this, a hybrid gesture recognition module is developed in this paper using machine learning techniques and the MediaPipe, OpenCV, and djitellopy packages and frameworks in a Python environment. The module precisely identifies and categorizes specified movements from a live video feed, creating a mapping between gesture movements and drone actions. The goal is to assess the gesture tracking and control system on the Tello EDU drone platform, which has limited hardware resources. The results show that the algorithm is effective in an indoor environment, allowing users to enjoy the benefits of gesture recognition without worrying about unintended or dangerous actions by the drone. {\copyright} 2023 IEEE.}
}
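
For readers unfamiliar with the toolchain named in the abstract, the sketch below shows, under assumed parameters, how the MediaPipe face-detection solution, OpenCV, and the djitellopy Tello API could be wired into a face-tracking control loop on a Tello EDU. It is a minimal illustration only: the control gains, the single-face assumption, and the error-to-velocity mapping are hypothetical placeholders, not the authors' published hybrid face and eye gesture algorithm.

# Illustrative sketch only, assuming MediaPipe face detection and djitellopy.
import cv2
import mediapipe as mp
from djitellopy import Tello

tello = Tello()
tello.connect()
tello.streamon()
tello.takeoff()

face_detector = mp.solutions.face_detection.FaceDetection(
    model_selection=0, min_detection_confidence=0.5)

try:
    while True:
        frame = tello.get_frame_read().frame          # BGR frame from the drone camera
        rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)  # MediaPipe expects RGB input
        results = face_detector.process(rgb)

        yaw, up_down = 0, 0
        if results.detections:
            # Use the first detected face; bounding-box coordinates are relative (0..1).
            box = results.detections[0].location_data.relative_bounding_box
            cx = box.xmin + box.width / 2.0
            cy = box.ymin + box.height / 2.0
            # Hypothetical proportional gains: keep the face centred in the frame
            # by yawing and climbing/descending toward the frame centre.
            yaw = int((cx - 0.5) * 100)
            up_down = int((0.5 - cy) * 100)

        # Map the tracking error to velocities (left/right, fwd/back, up/down, yaw).
        tello.send_rc_control(0, 0, up_down, yaw)

        cv2.imshow("Tello face tracking", frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    tello.send_rc_control(0, 0, 0, 0)
    tello.land()
    tello.streamoff()
    cv2.destroyAllWindows()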