Merge pull request #1 from rmackinnon/master

Code review and PEP8 clean-ups
NateSchoolfield 2021-01-26 23:32:18 -08:00 committed by GitHub
commit c23c3a4c81
2 changed files with 103 additions and 70 deletions


@@ -51,7 +51,7 @@ last_tick_time = None
 tick_seconds = 0.5
 fieldnames = ['time']
-for data_type, _ in sleep_data.items():
+for data_type in sleep_data:
     periods = sleep_data[data_type]['periods']
     for period in periods:
         fieldnames.append(data_type + str(period))
@@ -78,14 +78,15 @@ def get_mac_address(filename):
     mac_regex_pattern = re.compile(r'([0-9a-fA-F]{2}(?::[0-9a-fA-F]{2}){5})')
     try:
         with open(filename, "r") as f:
-            regex_match_from_file = re.search(mac_regex_pattern, f.read().strip())
-            if regex_match_from_file:
-                MAC_ADDR = regex_match_from_file[0]
+            hwaddr_search = re.search(mac_regex_pattern, f.read().strip())
+            if hwaddr_search:
+                MAC_ADDR = hwaddr_search[0]
             else:
-                print ("No valid MAC address found in " + str(filename))
+                print ("No valid MAC address found in {}".format(filename))
                 exit(1)
     except FileNotFoundError:
-        print ("MAC file not found: " + filename)
+        print ("MAC file not found: {}".format(filename))
         exit(1)
     return MAC_ADDR
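
A quick standalone check of the MAC pattern used above; the sample file contents here are made up for illustration:

    import re

    # Same pattern as get_mac_address(): six hex pairs separated by colons.
    mac_regex_pattern = re.compile(r'([0-9a-fA-F]{2}(?::[0-9a-fA-F]{2}){5})')

    sample_contents = "C8:0F:10:AA:BB:CC\n"          # hypothetical file contents
    hwaddr_search = re.search(mac_regex_pattern, sample_contents.strip())
    if hwaddr_search:
        print(hwaddr_search[0])                      # -> C8:0F:10:AA:BB:CC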
@@ -94,14 +95,14 @@ def get_auth_key(filename):
     authkey_regex_pattern = re.compile(r'([0-9a-fA-F]){32}')
     try:
         with open(filename, "r") as f:
-            regex_match_from_file = re.search(authkey_regex_pattern, f.read().strip())
-            if regex_match_from_file:
-                AUTH_KEY = bytes.fromhex(regex_match_from_file[0])
+            key_search = re.search(authkey_regex_pattern, f.read().strip())
+            if key_search:
+                AUTH_KEY = bytes.fromhex(key_search[0])
             else:
-                print ("No valid auth key found in " + str(filename))
+                print ("No valid auth key found in {}".format(filename))
                 exit(1)
     except FileNotFoundError:
-        print ("Auth key file not found: " + filename)
+        print ("Auth key file not found: {}".format(filename))
         exit(1)
     return AUTH_KEY
@@ -110,19 +111,26 @@ def process_heartrate_data(heartrate_data, tick_time):
     print("BPM: " + str(heartrate_data))
     if heartrate_data > 0:
         value_name = sleep_data['heartrate']['value_name']
-        sleep_data['heartrate']['raw_data'].append({ 'time': tick_time, value_name: heartrate_data } )
+        sleep_data['heartrate']['raw_data'].append({
+            'time': tick_time,
+            value_name: heartrate_data
+        } )

 def process_gyro_data(gyro_data, tick_time):
-    # Each gyro reading from miband4 comes over as a group of three, each containing x,y,z values
-    # This function summarizes the values into a single consolidated movement value
+    # Each gyro reading from miband4 comes over as a group of three,
+    # each containing x,y,z values. This function summarizes the
+    # values into a single consolidated movement value.
     global sleep_data
-    gyro_last_x = sleep_data['movement']['workspace']['gyro_last_x']
-    gyro_last_y = sleep_data['movement']['workspace']['gyro_last_y']
-    gyro_last_z = sleep_data['movement']['workspace']['gyro_last_z']
-    value_name = sleep_data['movement']['value_name']
+    sleep_move = sleep_data['movement']
+    sleep_workspace = sleep_move['workspace']
+
+    gyro_last_x = sleep_workspace['gyro_last_x']
+    gyro_last_y = sleep_workspace['gyro_last_y']
+    gyro_last_z = sleep_workspace['gyro_last_z']
+    value_name = sleep_move['value_name']
     gyro_movement = 0
     for gyro_datum in gyro_data:
         gyro_delta_x = abs(gyro_datum['x'] - gyro_last_x)
@@ -134,27 +142,31 @@ def process_gyro_data(gyro_data, tick_time):
         gyro_delta_sum = gyro_delta_x + gyro_delta_y + gyro_delta_z
         gyro_movement += gyro_delta_sum
-    sleep_data['movement']['workspace']['gyro_last_x'] = gyro_last_x
-    sleep_data['movement']['workspace']['gyro_last_y'] = gyro_last_y
-    sleep_data['movement']['workspace']['gyro_last_z'] = gyro_last_z
-    sleep_data['movement']['raw_data'].append({ 'time': tick_time, value_name: gyro_movement } )
+    sleep_workspace['gyro_last_x'] = gyro_last_x
+    sleep_workspace['gyro_last_y'] = gyro_last_y
+    sleep_workspace['gyro_last_z'] = gyro_last_z
+
+    sleep_move['raw_data'].append({
+        'time': tick_time,
+        value_name: gyro_movement
+    })

 def flush_old_raw_data(tick_time):
     global sleep_data
-    for data_type, _ in sleep_data.items():
-        periods = sleep_data[data_type]['periods']
+    for data_type in sleep_data:
+        s_data = sleep_data[data_type]
+        periods = s_data['periods']
         cleaned_raw_data = []
-        for raw_datum in sleep_data[data_type]['raw_data']:
+        for raw_datum in s_data['raw_data']:
             datum_age = tick_time - raw_datum['time']
             if datum_age < max(periods):
                 cleaned_raw_data.append(raw_datum)
-        sleep_data[data_type]['raw_data'] = cleaned_raw_data
+        s_data['raw_data'] = cleaned_raw_data

 def average_raw_data(tick_time):
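
The comment in process_gyro_data() describes collapsing each batch of x/y/z readings into one movement number; in essence that is a sum of absolute per-axis deltas against the previous reading. A minimal sketch of the idea, not the script's exact loop (function name and sample data are illustrative):

    def consolidate_movement(gyro_data, last_x, last_y, last_z):
        # Sum the absolute change on each axis, reading by reading.
        movement = 0
        for gyro_datum in gyro_data:
            movement += abs(gyro_datum['x'] - last_x)
            movement += abs(gyro_datum['y'] - last_y)
            movement += abs(gyro_datum['z'] - last_z)
            last_x, last_y, last_z = gyro_datum['x'], gyro_datum['y'], gyro_datum['z']
        return movement, last_x, last_y, last_z

    # consolidate_movement([{'x': 1, 'y': 2, 'z': 3}], 0, 0, 0) -> (6, 1, 2, 3)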
@@ -163,18 +175,18 @@ def average_raw_data(tick_time):
     timestamp = datetime.fromtimestamp(tick_time)
     csv_out = {'time': timestamp }
-    for data_type, _ in sleep_data.items():
-        period_averages_dict = {}
-        period_averages_dict['time'] = timestamp
-        periods = sleep_data[data_type]['periods']
-        value_name = sleep_data[data_type]['value_name']
+    for data_type in sleep_data:
+        s_data = sleep_data[data_type]
+        period_averages_dict = {'time': timestamp}
+        periods = s_data['periods']
+        value_name = s_data['value_name']
         flush_old_raw_data(tick_time)
         for period_seconds in periods:
             period_data = []
             period_averages_dict[period_seconds] = 0
-            for raw_datum in sleep_data[data_type]['raw_data']:
+            for raw_datum in s_data['raw_data']:
                 datum_age_seconds = tick_time - raw_datum['time']
                 if datum_age_seconds < period_seconds:
                     period_data.append(raw_datum[value_name])
@@ -182,21 +194,21 @@ def average_raw_data(tick_time):
             if len(period_data) > 0:
                 period_data_average = sum(period_data) / len(period_data)
             else:
-                print ("(" + data_type + ") Period data empty: " + str(period_seconds))
+                print("({}) Period data empty: {}".format(data_type,
+                                                          period_seconds))
                 period_data_average = 0
             period_averages_dict[period_seconds] = zero_to_nan(period_data_average)
             csv_out[data_type + str(period_seconds)] = zero_to_nan(period_data_average)
-        sleep_data[data_type]['averaged_data'].append(period_averages_dict)
+        s_data['averaged_data'].append(period_averages_dict)
     write_csv(csv_out)

 def zero_to_nan(value):
     if value == 0:
         return (float('nan'))
-    else:
-        return int(value)
+    return int(value)
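
zero_to_nan() now returns early instead of using an else branch; behaviour is unchanged: a zero average becomes NaN (presumably so the plot shows a gap rather than a real zero reading, though that motive is an inference here), anything else is truncated to an int. A quick illustration:

    import math

    def zero_to_nan(value):
        if value == 0:
            return (float('nan'))
        return int(value)

    print(zero_to_nan(72.4))              # 72
    print(math.isnan(zero_to_nan(0)))     # True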
@@ -210,8 +222,7 @@ def sleep_monitor_callback(data):
     if data[0] == "GYRO":
         process_gyro_data(data[1], tick_time)
-
-    if data[0] == "HR":
+    elif data[0] == "HR":
         process_heartrate_data(data[1], tick_time)

     if (tick_time - last_tick_time) >= tick_seconds:
@@ -220,7 +231,7 @@ def sleep_monitor_callback(data):
 def init_graph_data():
-    for data_type, _ in sleep_data.items():
+    for data_type in sleep_data:
         data_periods = sleep_data[data_type]['periods']
         graph_data[data_type] = {
             'time': [],
@@ -229,23 +240,31 @@ def init_graph_data():
         for period in data_periods:
             graph_data[data_type]['data'][period] = []

 def update_graph_data():
     global sleep_data
     global graph_data
-    for data_type, _ in sleep_data.items():
-        if len(sleep_data[data_type]['averaged_data']) > 1:
-            data_periods = sleep_data[data_type]['periods']
-            starting_index = max([(len(graph_data[data_type]['time']) - 1), 0])
-            ending_index = len(sleep_data[data_type]['averaged_data']) - 1
-            for sleep_datum in sleep_data[data_type]['averaged_data'][starting_index:ending_index]:
-                graph_data[data_type]['time'].append(sleep_datum['time'])
+    for data_type in sleep_data:
+        s_data = sleep_data[data_type]  # Re-referenced to shorten name
+        avg_data = s_data['averaged_data']
+        if len(avg_data) > 1:
+            g_data = graph_data[data_type]  # Re-referenced to shorten name
+            data_periods = s_data['periods']
+            starting_index = max([(len(g_data['time']) - 1), 0])
+            ending_index = len(avg_data) - 1
+            # Re-referenced to shorten name
+            sleep_data_range = avg_data[starting_index:ending_index]
+            for sleep_datum in sleep_data_range:
+                g_data['time'].append(sleep_datum['time'])
                 for period in data_periods:
-                    if graph_data[data_type]['data'][period] != 'nan':
-                        graph_data[data_type]['data'][period].append(sleep_datum[period])
+                    if g_data['data'][period] != 'nan':
+                        g_data['data'][period].append(sleep_datum[period])

 def graph_animation(i):
@@ -259,28 +278,35 @@ def graph_animation(i):
     update_graph_data()
-    for data_type, _ in graph_data.items():
+    for data_type in graph_data:
         if len(graph_data[data_type]['time']) > 0:
             graph_axes.clear()
             break
-    for data_type, _ in sleep_data.items():
-        if len(graph_data[data_type]['time']) > 0:
+    for data_type in sleep_data:
+        s_data = sleep_data[data_type]
+        g_data = graph_data[data_type]
+        if len(g_data['time']) > 0:
             plotflag = True
             data_periods = sleep_data[data_type]['periods']
             for period in data_periods:
-                axis_label = sleep_data[data_type]['value_name'] + " " + str(period) + "sec"
-                graph_axes.plot(graph_data[data_type]['time'], graph_data[data_type]['data'][period], label=axis_label)
+                axis_label = "{} {} sec".format(s_data['value_name'], period)
+                graph_axes.plot(g_data['time'],
+                                g_data['data'][period],
+                                label=axis_label)
     if plotflag:
         plt.legend()

 def connect():
     global band
     global mac_filename
     global auth_key_filename
     success = False
+    timeout = 3
+    msg = 'Connection to the MIBand failed. Trying again in {} seconds'

     MAC_ADDR = get_mac_address(mac_filename)
     AUTH_KEY = get_auth_key(auth_key_filename)
@@ -289,15 +315,14 @@ def connect():
         try:
             band = miband(MAC_ADDR, AUTH_KEY, debug=True)
             success = band.initialize()
+            break
         except BTLEDisconnectError:
-            print('Connection to the MIBand failed. Trying out again in 3 seconds')
-            time.sleep(3)
-            continue
+            print(msg.format(timeout))
+            time.sleep(timeout)
         except KeyboardInterrupt:
             print("\nExit.")
             exit()

 def start_data_pull():
     global band
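
With the new `timeout`/`msg` variables and the added `break`, the connection logic amounts to: attempt to initialise, stop retrying on success, back off and retry on BTLEDisconnectError. A condensed sketch of that flow; the enclosing while-loop and the parameterised signature are assumptions here, since the script itself uses globals and the loop header sits outside the hunk:

    import time
    from bluepy.btle import BTLEDisconnectError
    from miband import miband   # assumes the library file in this diff is saved as miband.py

    def connect(MAC_ADDR, AUTH_KEY):
        timeout = 3
        msg = 'Connection to the MIBand failed. Trying again in {} seconds'
        while True:
            try:
                band = miband(MAC_ADDR, AUTH_KEY, debug=True)
                band.initialize()
                return band                # success: stop retrying
            except BTLEDisconnectError:
                print(msg.format(timeout))
                time.sleep(timeout)        # back off, then try again
            except KeyboardInterrupt:
                print("\nExit.")
                exit()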


@@ -1,13 +1,20 @@
 import sys, os, time
 import logging
-from bluepy.btle import Peripheral, DefaultDelegate, ADDR_TYPE_RANDOM,ADDR_TYPE_PUBLIC, BTLEException, BTLEDisconnectError
-from constants import UUIDS, AUTH_STATES, ALERT_TYPES, QUEUE_TYPES, MUSICSTATE
 import struct
+from bluepy.btle import (
+    Peripheral, DefaultDelegate,
+    ADDR_TYPE_RANDOM, ADDR_TYPE_PUBLIC,
+    BTLEException, BTLEDisconnectError
+)
 from datetime import datetime, timedelta
 from Crypto.Cipher import AES
 from datetime import datetime
+from constants import (
+    UUIDS, AUTH_STATES, ALERT_TYPES, QUEUE_TYPES, MUSICSTATE
+)
 try:
     from Queue import Queue, Empty
 except ImportError:
@@ -96,6 +103,7 @@ class Delegate(DefaultDelegate):
         else:
             print ("Unhandled handle: " + str(hnd) + " | Data: " + str(data))

+
 class miband(Peripheral):
     _send_rnd_cmd = struct.pack('<2s', b'\x02\x00')
     _send_enc_key = struct.pack('<2s', b'\x03\x00')