#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
3 example gestures that work quite well (others do as well):
1) Z (drawn from top left as starting point)
2) ✔ (a checkmark, starting from left; just make sure the left arrow part is smaller than the right one, otherwise
it could also be a 'V' for example)
3) -> (an arrow, starting from left and drawing the upper half first)
"""
import ast
import pathlib
import sys
from enum import Enum

import pandas as pd
from PyQt5 import QtWidgets, uic, QtCore
from PyQt5.QtWidgets import QMessageBox

from dollar_one_recognizer import DollarOneRecognizer


class Mode(Enum):
    LEARN = "Learn"
    PREDICT = "Predict"


# noinspection PyAttributeOutsideInit
class GestureRecognizer(QtWidgets.QWidget):
    """
    Uses a $1 gesture recognizer to predict pre-defined gestures.
    """

    def __init__(self):
        super(GestureRecognizer, self).__init__()
        self.__log_folder = "existing_gestures"
        self.__log_file_path = pathlib.Path(self.__log_folder) / "gestures.csv"
        self.dollar_one_recognizer = DollarOneRecognizer()
        self._init_save_file()
        self._init_ui()

    def _init_save_file(self):
        folder_path = pathlib.Path(self.__log_folder)
        if not folder_path.is_dir():
            folder_path.mkdir()

        # check if the file already exists
        if self.__log_file_path.exists():
            # load existing gestures
            self.existing_gestures = pd.read_csv(self.__log_file_path, sep=";")
            # use a converter to convert the saved list back to a list (by default it would be a string)
            self.existing_gestures['gesture_data'] = self.existing_gestures['gesture_data'].apply(ast.literal_eval)
        else:
            # or create a new csv if it doesn't exist
            self.existing_gestures = pd.DataFrame(columns=['gesture_name', 'gesture_data'])
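
    # For reference, a minimal sketch of the on-disk format this produces (illustrative
    # values only; the exact point count and shape depend on what
    # DollarOneRecognizer.normalize returns):
    #
    #   gesture_name;gesture_data
    #   Z;[[0.12, 0.34], [0.15, 0.37], ...]
    #
    # Because the CSV stores gesture_data as a string, ast.literal_eval above turns it
    # back into a Python list when the file is loaded.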

    def _save_to_file(self, gesture_name):
        current_points = self.ui.draw_widget.get_current_points()
        # normalize the gesture before saving so it doesn't have to be done again every time a prediction is made
        normalized_gesture = self.custom_filter(current_points)

        if gesture_name in self.existing_gestures['gesture_name'].unique():
            # if the gesture already exists, ask the user whether it should be overwritten
            choice = QMessageBox.question(self, 'Overwrite gesture?',
                                          "A gesture with this name already exists! Do you want to overwrite it?",
                                          QMessageBox.Yes | QMessageBox.No)
            if choice == QMessageBox.Yes:
                # replace the existing data for this gesture_name with the new data
                # normalized_gesture needs to be wrapped in a list, otherwise replacing the np.array directly would
                # lead to a crash: "ValueError: cannot copy sequence with size ... to array axis with dimension 1"
                self.existing_gestures.loc[self.existing_gestures['gesture_name'] == gesture_name,
                                           "gesture_data"] = [normalized_gesture]
                self.ui.save_label.setText(f"Old content for gesture \"{gesture_name}\" was successfully overwritten!")
            else:
                return
        else:
            # save the new points as a list instead of a numpy array for the same reason as above;
            # build a one-row frame and concat it (DataFrame.append was removed in pandas 2.0)
            new_gesture = {'gesture_name': gesture_name, 'gesture_data': [normalized_gesture]}
            self.existing_gestures = pd.concat([self.existing_gestures, pd.DataFrame([new_gesture])],
                                               ignore_index=True)
            self.ui.save_label.setText(f"Gesture \"{gesture_name}\" was successfully saved!")

        self.existing_gestures.to_csv(self.__log_file_path, sep=";", index=False)

    def _init_ui(self):
        self.ui = uic.loadUi("gesture_recognizer.ui", self)
        self.ui.mode_selection.setFocusPolicy(QtCore.Qt.NoFocus)  # prevent auto-focus
        self._setup_draw_widget()
        self._setup_learn_ui()
        self._setup_predict_ui()

        # connect the dropdown menu to switch between learning and predicting gestures
        self.ui.mode_selection.currentIndexChanged.connect(self.mode_changed)
        self.ui.mode_selection.setCurrentIndex(0)  # set the first item in the dropdown box as selected at the start

        self.ui.predict_ui.hide()
        self.ui.learn_ui.show()
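
    # Note on assumptions: the widget names used throughout this class (draw_widget,
    # mode_selection, learn_ui, predict_ui, btn_save, btn_predict, gesture_name_input,
    # save_label, error_label, existing_gestures_list, prediction_result) are expected
    # to be provided by gesture_recognizer.ui; they are not defined in this file.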

    def _setup_learn_ui(self):
        self.ui.btn_save.setFocusPolicy(QtCore.Qt.NoFocus)
        self.ui.btn_save.clicked.connect(self._save_template)

    def _setup_predict_ui(self):
        self.ui.btn_predict.setFocusPolicy(QtCore.Qt.NoFocus)
        self.ui.btn_predict.clicked.connect(self._predict_gesture)

    def mode_changed(self, index):
        self.current_mode = self.mode_selection.currentText()
        print(f"Current index {index}; selection changed to {self.current_mode}")

        if self.current_mode == Mode.LEARN.value:
            # reset data when switching from learn to predict and vice versa and show the correct ui
            self._show_learn_ui()
        elif self.current_mode == Mode.PREDICT.value:
            self._show_predict_ui()
        else:
            print(f"Mode {self.current_mode} not known!")

    def _show_learn_ui(self):
        self.ui.draw_widget.reset_current_points()
        self.ui.error_label.setText("")
        self._reset_learn_ui()
        self.ui.predict_ui.hide()
        self.ui.learn_ui.show()

    def _show_predict_ui(self):
        self.ui.error_label.setText("")
        self._reset_predict_ui()
        # TODO let user delete specific ones
        known_gestures = self.existing_gestures.gesture_name.unique()
        self.ui.existing_gestures_list.setText(f"{known_gestures}")
        self.ui.learn_ui.hide()
        self.ui.predict_ui.show()

    def custom_filter(self, points):
        return self.dollar_one_recognizer.normalize(points)

    def _setup_draw_widget(self):
        # set the draw widget's custom filter variable to the method of the same name, which applies our
        # transformation stack
        self.ui.draw_widget.set_custom_filter(self.custom_filter)

    def _save_template(self):
        # check if a name for this gesture has been entered
        if not self.ui.gesture_name_input.text():
            self.ui.error_label.setText("You have to enter a name for the drawn gesture to save it!")
            return
        elif len(self.ui.draw_widget.get_current_points()) < 1:
            self.ui.error_label.setText("You have to draw more to save this as a gesture!")
            return

        self.ui.error_label.setText("")  # hide error label
        gesture_name = self.ui.gesture_name_input.text()
        self._save_to_file(gesture_name)
        self.ui.gesture_name_input.clear()  # reset the name input field
        self._reset_canvas()  # reset the current gesture data on the canvas!

    def _reset_learn_ui(self):
        self.ui.gesture_name_input.setText("")
        self.ui.save_label.setText("")
        self._reset_canvas()

    def _reset_predict_ui(self):
        self.ui.prediction_result.setText("No gesture found!")
        self._reset_canvas()

    def _reset_canvas(self):
        self.ui.draw_widget.reset_current_points()
        self.ui.draw_widget.update()  # update the draw widget immediately so it will be redrawn without the points

    def _predict_gesture(self):
        if len(self.ui.draw_widget.get_current_points()) < 1:
            self.ui.error_label.setText("You have to draw a gesture on the canvas to predict it!")
            return

        self.ui.error_label.setText("")
        drawn_gesture = self.ui.draw_widget.get_current_points()
        normalized_gesture = self.custom_filter(drawn_gesture)
        template_dict = dict(self.existing_gestures.values)

        recognition_result = self.dollar_one_recognizer.recognize(normalized_gesture, template_dict)
        if recognition_result is not None:
            best_template, score = recognition_result
            self.ui.prediction_result.setText(f"{best_template} (score: {score:.2f})")
        else:
            self.ui.prediction_result.setText("Couldn't predict a gesture!")
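
    # A minimal sketch of the DollarOneRecognizer interface as it is used here (inferred
    # from the calls above, not from its implementation): normalize(points) turns the raw
    # canvas points into a normalized point list, and recognize(candidate, templates)
    # takes a dict of {gesture_name: normalized_points} and is expected to return either
    # (best_template_name, score) or None, e.g. (hypothetical values):
    #
    #   result = self.dollar_one_recognizer.recognize(normalized_gesture,
    #                                                 {"Z": z_points, "arrow": arrow_points})
    #   # -> ("Z", 0.93) on a good match, or None if nothing matches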


def main():
    app = QtWidgets.QApplication(sys.argv)
    recognizer = GestureRecognizer()
    recognizer.show()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()