-
Notifications
You must be signed in to change notification settings - Fork 0
/
demo.py
66 lines (52 loc) · 1.89 KB
/
demo.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
from __future__ import print_function
import json as js
import numpy as np
import time
import os
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
import matplotlib.pyplot as plt
from keras.models import load_model
def convertToNp(json, id):
    """Convert a motion-capture JSON string into windows of labelled samples.

    Parameters
    ----------
    json : str
        JSON document with a top-level "data" list of objects, each
        carrying "x", "y" and "z" coordinate fields.
    id : int
        Class label stored alongside every sample (file uses 1 = Andy,
        0 = James).

    Returns
    -------
    numpy.ndarray
        Shape (len(data) // 4, 4, 4); each row is (id, x, y, z).
        Trailing samples that do not fill a complete window of 4 are
        discarded.
    """
    # NOTE: parameter names `json`/`id` shadow the module concept and the
    # builtin, but are kept for backward compatibility with callers.
    parsed = js.loads(json)
    data = parsed["data"]
    # Floor division instead of int(len/4): same result, clearer intent.
    windows = np.zeros((len(data) // 4, 4, 4))
    for win_idx in range(len(windows)):
        # Renamed locals: the originals shadowed the `time` module and the
        # `object` builtin.
        for step in range(4):
            sample = data[win_idx * 4 + step]
            windows[win_idx][step] = id, sample["x"], sample["y"], sample["z"]
    return windows
# Load the raw recordings. `with` guarantees the file handles are closed
# (the original `open(...).read()` calls leaked open handles).
with open("andy1.json", "r") as f:
    andyJson = f.read()
with open("james1.json", "r") as f:
    jamesJson = f.read()

# Convert to (n_windows, 4, 4) numpy arrays; Andy is labelled 1, James 0.
andyJson = convertToNp(andyJson, 1)
jamesJson = convertToNp(jamesJson, 0)
# Select which recording feeds the demo prediction below.
usingJames = True

# The original duplicated the whole preparation pipeline in each branch;
# only the source array differs, so choose it once.
dataset = jamesJson if usingJames else andyJson

# (n_windows, 4, 4) array of (id, x, y, z) rows, copied so later edits
# cannot alias the source (mirrors the original element-wise copy).
x_train_pre = np.array(dataset)

# Drop the label column (index 0), keeping the (x, y, z) coordinates.
# One numpy slice replaces the original per-element Python loops and
# yields the identical (n_windows, 4, 3) float array.
x_train = x_train_pre[:, :, 1:4]
# Restore the trained classifier from disk.
model = load_model("Model-88.hf")

# Classify the fourth window; the network expects a leading batch
# dimension, hence the (1, 4, 3) reshape.
new_sample = x_train[3].reshape(1, 4, 3)
score = model.predict(new_sample)[0][0]

# NOTE(review): per the sample outputs in the trailing comments, a score
# below 0.5 corresponds to the "Andy" class — confirm against training.
if score < 0.50:
    print("Andy was walking!")
else:
    print("Your phone has been stolen!")
# Andy [[0.25224862 0.7477514 ]]
# James [[0.7459176 0.2540824]]