server.py
# server.py
import eventlet
eventlet.monkey_patch()  # must run before the other imports so eventlet can cooperatively schedule them

import cv2
import numpy as np
from flask import Flask, request
from flask_cors import CORS
from flask_socketio import SocketIO, emit

from gaze import gazeDetect
from posture import doOneFrame
from user import User

app = Flask(__name__)
CORS(app)
socketio = SocketIO(app, cors_allowed_origins="*")

# One User object per connected Socket.IO session, keyed by request.sid.
clients = {}
@socketio.on('connect')
def handle_connect():
    """Register a new client and start tracking its posture/gaze statistics."""
    print('Client connected')
    clients[request.sid] = User(0, 0, 0)


@socketio.on('handleDisc')
def handleDisc():
    """Send the client its accumulated session stats, then drop its state."""
    if clients.get(request.sid) is not None:
        user_data = clients[request.sid].to_dict()
        emit("custom", user_data, room=request.sid)
        clients.pop(request.sid)


@socketio.on('disconnect')
def handle_disconnect():
    print('Client disconnected')
    if clients.get(request.sid) is not None:
        print(clients[request.sid].total_ticks)
@socketio.on('videoData')
def handle_video(data):
    """Decode one video frame, score posture and gaze, and push feedback to the client."""
    posture_text = "Good Posture"
    gaze_text = "Looking At Screen"
    if clients.get(request.sid) is not None:
        currentClient = clients[request.sid]
        currentClient.increaseTimer()

        # Decode the incoming video data (encoded image bytes, e.g. JPEG)
        nparr = np.frombuffer(data, np.uint8)
        frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        if frame is not None:
            currentPosture = doOneFrame(frame)  # True when posture looks good
            currentGaze = gazeDetect(frame)     # True when the user is looking at the screen

            if currentPosture:
                currentClient.increaseGoodTick()
            else:
                currentClient.increaseBadPostureTick()
                posture_text = "Bad Posture"

            if currentGaze:
                currentClient.increaseGoodGazeTick()
            else:
                gaze_text = "Not Looking At Screen"

            print(currentGaze)
            print(currentPosture)
            print(currentClient.timer_())
            currentClient.increaseTick()

            # Only emit feedback once the per-client timer elapses, to avoid flooding the client.
            if currentClient.checkTimer():
                emit('response_posture', posture_text, room=request.sid)
                emit('response_gaze', gaze_text, room=request.sid)
                currentClient.resetTimer()
                if currentClient.postureBadForLongTime():
                    emit('notify_posture', "You've Had Bad Posture For A While!", room=request.sid)
                    currentClient.resetBadTicks()
                print(currentClient.good_ticks / currentClient.total_ticks)

            # faces = face_cascade.detectMultiScale(gray, 1.2, 4)
            # for x, y, w, h in faces:
            #     cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 0, 0), 2)
            # emit('response', len(faces), room=request.sid)

            # Debug preview of the received frame; display it for 1 ms and continue.
            cv2.imshow('Video', frame)
            cv2.waitKey(1)
if __name__ == '__main__':
    # Serve the Socket.IO app on localhost:5000 using the eventlet worker.
    socketio.run(app, host='127.0.0.1', port=5000)
    cv2.destroyAllWindows()
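
For reference, below is a minimal sketch of a matching client, assuming the python-socketio client package and OpenCV are installed. The file name client_example.py and the webcam loop are illustrative and not part of this repository; only the event names ('videoData', 'response_posture', 'response_gaze', 'notify_posture', 'handleDisc', 'custom') come from the handlers above.

# client_example.py (hypothetical companion script, not part of this repository)
import time

import cv2
import socketio

sio = socketio.Client()

@sio.on('response_posture')
def on_posture(msg):
    print('Posture:', msg)

@sio.on('response_gaze')
def on_gaze(msg):
    print('Gaze:', msg)

@sio.on('notify_posture')
def on_notify(msg):
    print('Alert:', msg)

@sio.on('custom')
def on_stats(stats):
    print('Session stats:', stats)

sio.connect('http://127.0.0.1:5000')

cap = cv2.VideoCapture(0)  # default webcam
try:
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        # The server expects encoded image bytes it can pass to cv2.imdecode.
        _, buf = cv2.imencode('.jpg', frame)
        sio.emit('videoData', buf.tobytes())
        time.sleep(0.1)  # throttle to roughly 10 frames per second
finally:
    cap.release()
    sio.emit('handleDisc')  # ask the server for final stats before leaving
    sio.disconnect()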