Skip to content

Commit 01f68f6

Browse files
authored
Add files via upload
1 parent 773080f commit 01f68f6

File tree

2 files changed

+396
-0
lines changed

2 files changed

+396
-0
lines changed
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
import numpy as np
2+
import math
3+
4+
# This replicates clf.decision_function(X)
def decision_function(params, sv, nv, a, b, x_test):
    """Manually reproduce sklearn SVC.decision_function (linear kernel, OVO).

    params : unused here (kept for signature parity with the sklearn model)
    sv     : stacked support vectors of every class
    nv     : number of support vectors per class
    a      : dual coefficients (clf.dual_coef_)
    b      : intercepts, one per class pair (clf.intercept_)
    x_test : samples to score
    Returns a list with one array of decision values per class pair.
    """
    # Kernel values: one row per support vector, one column per test sample.
    rows = [np.dot(vec, x_test.T) for vec in sv]
    k = rows[0]
    for row in rows[1:]:
        k = np.vstack((k, row))

    # Per-class slice boundaries into the stacked support-vector array.
    boundaries = [0]
    for n_sv in nv:
        boundaries.append(boundaries[-1] + n_sv)
    start, end = boundaries[:-1], boundaries[1:]

    # sum(a_p * k(x_p, x)) for every ordered class pair (i, j), i < j.
    c = []
    n_classes = len(nv)
    for i in range(n_classes):
        for j in range(i + 1, n_classes):
            left = sum(a[i][p] * k[p] for p in range(start[j], end[j]))
            right = sum(a[j - 1][p] * k[p] for p in range(start[i], end[i]))
            c.append(left + right)

    # Add the per-pair intercept.
    return [pair_score + intercept for pair_score, intercept in zip(c, b)]
24+
25+
# This replicates clf.predict(X)
def predict(params, sv, nv, a, b, cs, x_test):
    """Manually reproduce sklearn SVC.predict via one-vs-one majority voting.

    params, sv, nv, a, b : model attributes, passed through to
        decision_function (see that function's docstring).
    cs     : the class labels (clf.classes_)
    x_test : samples to classify
    Returns a list with the predicted class label for each sample.

    FIX: the original shadowed the ``cs`` parameter with a local list and
    returned the winning *index* instead of the class label ``cs[index]``,
    which disagrees with clf.predict whenever labels are not 0..k-1.
    """
    decision = decision_function(params, sv, nv, a, b, x_test)
    # Ordered class pairs (i, j), i < j — same ordering sklearn uses for OVO.
    pairs = [(i, j) for i in range(len(cs)) for j in range(i + 1, len(cs))]

    predictions = []
    for s in range(len(x_test)):
        # Each pair votes for class i on a positive decision value, else j.
        votes = [(i if decision[p][s] > 0 else j)
                 for p, (i, j) in enumerate(pairs)]
        winner = max(set(votes), key=votes.count)
        predictions.append(cs[winner])  # map winning index -> actual label

    return predictions
38+
39+
# Demo / validation: train a linear SVC on the data files, then compare
# sklearn's own predictions against the manual re-implementation above.

#X_tmp = np.loadtxt('final.txt')
#X = X_tmp[:, :2]
X = np.loadtxt('final.txt')            # feature matrix, one sample per row
Y = np.loadtxt('final_target.txt')     # class labels, one per sample

from sklearn.model_selection import train_test_split
# Fixed random_state so the split (and thus the fitted model) is reproducible.
x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.3, random_state=87)

from sklearn.svm import SVC
clf = SVC(kernel='linear')
clf.fit(x_train, y_train)

# Get parameters from model
params = clf.get_params()
sv = clf.support_vectors_   # all support vectors, stacked class by class
nv = clf.n_support_         # number of support vectors per class
a = clf.dual_coef_          # dual coefficients for the OVO sub-problems
b = clf.intercept_          # one intercept per class pair
cs = clf.classes_           # class labels in sorted order

test = clf.decision_function(x_test)   # reference values for comparison
y_predict = clf.predict(x_test)        # reference predictions
print(y_predict)
# Use the functions to predict
predict_algorithm = predict(params, sv, nv, a, b, cs, x_test)
print(predict_algorithm)
print('OK')
Lines changed: 331 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,331 @@
1+
# import sys
2+
# sys.path.append("D:/openpose-master/openpose-master/build/examples/tutorial_api_python/")
3+
from openni import openni2
4+
import numpy as np
5+
import sys
6+
import cv2
7+
import os
8+
from sys import platform
9+
import ftdi_pybind
10+
import socket
11+
import struct
12+
import time
13+
from threading import Thread
14+
from playsound import playsound
15+
16+
# Define music to do parallel.
# The 30 near-identical playsound wrappers (28 piano notes, drum, guitar)
# are generated from a single factory instead of being written out by hand,
# while keeping the exact same module-level names for the Thread targets.

def _make_player(path):
    """Return a zero-argument function that plays the sound file at *path*."""
    def _player():
        playsound(path)
    return _player

# Piano notes: play_piano_C<octave>_<note> for octaves 1-4, notes 1-7,
# each bound to music/piano/C<octave>_<note>.mp3 — same names and same
# files as the original hand-written definitions.
for _octave in range(1, 5):
    for _note in range(1, 8):
        globals()['play_piano_C%d_%d' % (_octave, _note)] = _make_player(
            'music/piano/C%d_%d.mp3' % (_octave, _note))

play_drum = _make_player('music/drum_front.mp3')
play_guitar = _make_player('music/guitar.mp3')
82+
83+
# Import Openpose (Windows/Ubuntu/OSX).
# pyopenpose is a compiled extension that must be located relative to the
# OpenPose build tree, so sys.path / PATH are patched before importing.
dir_path = os.path.dirname(os.path.realpath(__file__))
try:
    # Windows Import
    if platform == "win32":
        # Change these variables to point to the correct folder (Release/x64 etc.)
        sys.path.append(dir_path + '/../../python/openpose/Release');
        # The DLLs pyopenpose links against live in the build output folders.
        os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../x64/Release;' + dir_path + '/../../bin;'
        import pyopenpose as op
    else:
        # Change these variables to point to the correct folder (Release/x64 etc.)
        sys.path.append('../../python');
        # If you run `make install` (default path is `/usr/local/python` for Ubuntu), you can also access the OpenPose/python module from there. This will install OpenPose and the python library at your desired installation path. Ensure that this is in your python path in order to use it.
        # sys.path.append('/usr/local/python')
        from openpose import pyopenpose as op
except ImportError as e:
    print('Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?')
    # Re-raise: nothing below can work without pyopenpose.
    raise e
101+
102+
# Drive Kinect: open the first available OpenNI device and start both streams.
openni2.initialize()
dev = openni2.Device.open_any()
print('opening kinect', dev.get_device_info())

depth_stream = dev.create_depth_stream()
color_stream = dev.create_color_stream()
depth_stream.start()
color_stream.start()

# Custom Params (refer to include/openpose/flags.hpp for more parameters)
params = dict()
params["model_folder"] = "../../../models/"
params["hand"] = True                 # enable hand keypoint detection
params["disable_multi_thread"] = True
params["number_people_max"] = 1       # track a single person only
params["hand_detector"] = 0           # 0 = use explicit hand rectangles
params["body"] = 1

# Starting OpenPose
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()
datum = op.Datum()

# Make sure to connect to ARC (handshake over the FTDI link).
ftdi_pybind.init()
ftdi_pybind.send_data("vlsi", 4)
# Frames successfully processed and sent to the board so far.
count = 0
131+
132+
# -----------------------camera-------------------------------

def _pack_hand_points(hand_keypoints, buffer, offset):
    """Pack 21 (x, y) hand keypoints into *buffer* starting at *offset*.

    Each point occupies 4 bytes: x_hi, x_lo, y_hi, y_lo (big-endian byte
    pairs), matching the layout the ARC board expects.
    """
    for i in range(0, 21):
        x = int(hand_keypoints[i][0])
        y = int(hand_keypoints[i][1])
        base = offset + i * 4
        buffer[base] = x // 256        # x high byte
        buffer[base + 1] = x % 256     # x low byte
        buffer[base + 2] = y // 256    # y high byte
        buffer[base + 3] = y % 256     # y low byte


def _play_async(player):
    """Start *player* on a worker thread so the camera loop never blocks."""
    T = Thread(target=player)
    T.start()


# Hand rectangle hints for OpenPose — constant, so hoisted out of the loop.
HAND_RECTANGLES = [
    # Left/Right hands person 0
    [
        op.Rectangle(320.035889, 377.675049, 69.300949, 69.300949),
        op.Rectangle(0., 0., 0., 0.),
    ],
    # Left/Right hands person 1
    [
        op.Rectangle(80.155792, 407.673492, 80.812706, 80.812706),
        op.Rectangle(46.449715, 404.559753, 98.898178, 98.898178),
    ],
    # Left/Right hands person 2
    [
        op.Rectangle(185.692673, 303.112244, 157.587555, 157.587555),
        op.Rectangle(88.984360, 268.866547, 117.818230, 117.818230),
    ],
]

while True:
    # rgb map: grab a 640x480 frame and convert to the channel order OpenPose wants.
    cframe = color_stream.read_frame()
    cframe_data = np.array(cframe.get_buffer_as_triplet()).reshape([480, 640, 3])
    cframe_data = cv2.cvtColor(cframe_data, cv2.COLOR_BGR2RGB)

    datum.cvInputData = cframe_data
    datum.handRectangles = HAND_RECTANGLES
    # Process and display image
    opWrapper.emplaceAndPop([datum])
    output_img = datum.cvOutputData

    # Draw guide lines: a green center line plus 14 piano-key separators on
    # each side (every 7th key in red to mark octave boundaries).
    piano_dis = 19
    cv2.line(output_img, (320, 0), (320, 640), (0, 255, 0), 3)
    for i in range(1, 15):
        line_color = (0, 0, 225) if i % 7 == 0 else (0, 0, 0)
        cv2.line(output_img, (320 - i * piano_dis, 0), (320 - i * piano_dis, 640), line_color, 3)
        cv2.line(output_img, (320 + i * piano_dis, 0), (320 + i * piano_dis, 640), line_color, 3)

    cv2.imshow("OpenPose 1.4.0 - Tutorial Python API", output_img)
    key = cv2.waitKey(1)
    # FIX: the original loop had no exit, so the cleanup at the bottom was
    # unreachable. Press Esc to leave the loop and release the device.
    if key == 27:
        break

    # Start to recognize keypoints; skip the frame if anything is missing.
    if datum.handKeypoints[1].shape == ():
        print("Cannot find people")
    elif datum.poseKeypoints[0][0][0] == 0 and datum.poseKeypoints[0][0][1] == 0 and datum.poseKeypoints[0][0][2] == 0:
        print("Cannot find body")
    elif datum.handKeypoints[0][0][0][0] == 0 and datum.handKeypoints[0][0][0][1] == 0 and datum.handKeypoints[0][0][0][2] == 0:
        print("Cannot find left hand")
    elif datum.handKeypoints[1][0][0][0] == 0 and datum.handKeypoints[1][0][0][1] == 0 and datum.handKeypoints[1][0][0][2] == 0:
        print("Cannot find right hand")
    else:
        # Send data to ARC: left hand at bytes 0-83, right hand at 84-167.
        output_data_list = [0 for n in range(168)]
        _pack_hand_points(datum.handKeypoints[0][0], output_data_list, 0)
        _pack_hand_points(datum.handKeypoints[1][0], output_data_list, 84)

        count = count + 1
        # print(count)

        # Communicate with ARC
        ftdi_pybind.send_data("vlsi", 4)
        ftdi_pybind.send_point_data(ftdi_pybind.list_foo(output_data_list))
        ftdi_pybind.send_data("OK", 2)

        # Read data from ARC
        result = ftdi_pybind.read_data()
        result2 = result & 0xffffffff
        # Ex: result2 = 3221225472: 2 / 1073741824: 0
        # Render as 32 bits then reverse so index b is bit b (LSB first).
        result2_bit = '{:032b}'.format(result2)[::-1]
        print("result:", result2)

        # Bits 31-30 select the instrument; lower bits carry note/hit flags.
        if result2_bit[31] == '0' and result2_bit[30] == '1':
            # Piano: bit b (0-27) maps to note C<b//7 + 1>_<b%7 + 1>.
            # High bits first, matching the original dispatch order.
            for bit in range(27, -1, -1):
                if result2_bit[bit] == '1':
                    _play_async(globals()['play_piano_C%d_%d' % (bit // 7 + 1, bit % 7 + 1)])
        elif result2_bit[31] == '1' and result2_bit[30] == '0':
            # Drum: either of bits 29/28 triggers a hit.
            if result2_bit[29] == '1' or result2_bit[28] == '1':
                _play_async(play_drum)
        elif result2_bit[31] == '1' and result2_bit[30] == '1':
            # Guitar
            if result2_bit[28] == '1':
                _play_async(play_guitar)

# close app (now reachable via Esc; also stop the depth stream the
# original started but never released).
depth_stream.stop()
color_stream.stop()
dev.close()

0 commit comments

Comments
 (0)