Split data output into average and individual files for each biometric, moved gyro averaging into its own class
This commit is contained in:
parent 093e7a5006
commit 4dfd194483
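The practical effect, sketched below from the reworked write_csv in sleepdata.py further down: averaged values now go to a single datestamped file per day, while raw readings get a file of their own per biometric. The format strings are taken from the diff; the 'raw_heartrate' suffix is only illustrative, since the real suffix comes from each biometric's value_name field.

from datetime import datetime

# Format strings copied from the new sleepdata.py in this commit.
datestamp = datetime.now().strftime("%Y_%m_%d")
csv_filename_format = '{}_{}.csv'

avg_file = csv_filename_format.format(datestamp, 'avg')            # e.g. 2021_01_31_avg.csv
raw_file = csv_filename_format.format(datestamp, 'raw_heartrate')  # illustrative raw file name
print(avg_file, raw_file)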
@@ -0,0 +1,21 @@
+time,heartrate_2,heartrate_5,heartrate_10,heartrate_15,movement_10,movement_30,movement_60
+2021-01-31 16:04:05.010676,nan,nan,nan,nan,48,48,48
+2021-01-31 16:04:35.476006,79,79,79,79,16,16,17
+2021-01-31 16:04:36.241533,79,79,79,79,16,16,17
+2021-01-31 16:04:37.005988,79,79,79,79,16,16,17
+2021-01-31 16:04:37.816041,nan,79,79,79,16,16,17
+2021-01-31 16:04:38.625689,nan,79,79,79,16,16,18
+2021-01-31 16:04:39.436024,nan,79,79,79,16,16,17
+2021-01-31 16:04:40.246131,nan,nan,79,79,16,16,17
+2021-01-31 16:04:41.011184,nan,nan,79,79,16,16,17
+2021-01-31 16:04:41.821067,nan,nan,79,79,16,16,17
+2021-01-31 16:04:42.631347,nan,nan,79,79,16,16,17
+2021-01-31 16:04:43.441469,nan,nan,79,79,16,16,17
+2021-01-31 16:04:44.251060,nan,nan,79,79,16,16,17
+2021-01-31 16:04:45.016751,nan,nan,79,79,16,16,17
+2021-01-31 16:04:45.825831,nan,nan,nan,79,16,16,17
+2021-01-31 16:04:46.680977,nan,nan,nan,79,16,16,17
+2021-01-31 16:04:47.446140,nan,nan,nan,79,15,16,17
+2021-01-31 16:04:48.210846,nan,nan,nan,79,15,16,17
+2021-01-31 16:04:49.021353,nan,nan,nan,79,15,16,17
+2021-01-31 16:04:49.831218,nan,nan,nan,79,15,16,17
@@ -0,0 +1 @@
+time,heartrate_2,heartrate_5,heartrate_10,heartrate_15,movement_10,movement_30,movement_60
@@ -114,8 +114,6 @@ def vibrate_random(duration):
        band.vibrate(vibrate_ms)
        time.sleep(vibro_delay)
-
-

def sleep_monitor_callback(data):
    tick_time = time.time()
    if not sleepdata.last_tick_time:
@@ -123,7 +121,7 @@ def sleep_monitor_callback(data):
    process_data(data, tick_time)
    average_data(tick_time)

-def connect(mac_filename, auth_key_filename):
+def connect():
    global band
    success = False
    timeout = 3
@@ -179,7 +177,7 @@ def vibrate_rolling():
        band.vibrate(x)

if __name__ == "__main__":
-    connect(mac_filename, auth_key_filename)
+    connect()
    threading.Thread(target=start_data_pull).start()
    threading.Thread(target=timed_buzzing, args=([buzz_delay, 15])).start()
    #sleepdata.init_graph()
sleepdata.py (115 changed lines)
@@ -1,14 +1,10 @@
from datetime import datetime
from os import path

import csv

import matplotlib.pyplot as plt
import matplotlib.animation as animation

-#Todo: separate graph animation from data averaging
-#Todo: log raw data separately from average data
-
-
sleep_data = {
    'heartrate': {
@@ -34,14 +30,9 @@ last_heartrate = 0
last_tick_time = None
tick_seconds = 0.5

-csv_filename = "sleep_data.csv"
-fieldnames = ['time']
-for data_type in sleep_data:
-    periods = sleep_data[data_type]['periods']
-    for period in periods:
-        fieldnames.append(data_type + str(period))
+datestamp = datetime.now().strftime("%Y_%m_%d")
+csv_header_name_format = '{}_{}'
+csv_filename_format = '{}_{}.csv'

plt.style.use('dark_background')
graph_figure = plt.figure()
@@ -49,17 +40,52 @@ graph_axes = graph_figure.add_subplot(1, 1, 1)
graph_data = {}


-def write_csv(data):
-    global fieldnames
-    global csv_filename
+class Average_Gyro_Data():
+    gyro_last_x = 0
+    gyro_last_y = 0
+    gyro_last_z = 0
+
+    # Each gyro reading from miband4 comes over as a group of three,
+    # each containing x,y,z values. This function summarizes the
+    # values into a single consolidated movement value.
+    def process(self, gyro_data):
+        gyro_movement = 0
+        for gyro_datum in gyro_data:
+            gyro_delta_x = abs(gyro_datum['x'] - self.gyro_last_x)
+            self.gyro_last_x = gyro_datum['x']
+            gyro_delta_y = abs(gyro_datum['y'] - self.gyro_last_y)
+            self.gyro_last_y = gyro_datum['y']
+            gyro_delta_z = abs(gyro_datum['z'] - self.gyro_last_z)
+            self.gyro_last_z = gyro_datum['z']
+            gyro_delta_sum = gyro_delta_x + gyro_delta_y + gyro_delta_z
+            gyro_movement += gyro_delta_sum
+        return gyro_movement
+
+
+def write_csv(data, name):
+    fieldnames = ['time']
+    for fieldname in data[0]:
+        if fieldname != 'time':
+            fieldnames.append(fieldname)
+            if name == 'raw':
+                name = '{}_{}'.format(name, fieldname)
+
+    csv_filename = csv_filename_format.format(datestamp, name)

    if not path.exists(csv_filename):
-        with open(csv_filename, 'w', newline='') as csvfile:
-            csv_writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
-            csv_writer.writeheader()
-            csv_writer.writerow(data)
+        open_handle = 'w'
    else:
-        with open(csv_filename, 'a', newline='') as csvfile:
+        open_handle = 'a'
+
+    with open(csv_filename, open_handle, newline='') as csvfile:
        csv_writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+        if open_handle == 'w':
+            csv_writer.writeheader()
+        if type(data) is list:
+            for row in data:
+                csv_writer.writerow(row)
+        else:
            csv_writer.writerow(data)
@@ -69,13 +95,18 @@ def flush_old_raw_data(tick_time):
        periods = s_data['periods']

        cleaned_raw_data = []
+        old_raw_data = []

        for raw_datum in s_data['raw_data']:
            datum_age = tick_time - raw_datum['time']
            if datum_age < max(periods):
                cleaned_raw_data.append(raw_datum)
+            else:
+                old_raw_data.append(raw_datum)

        s_data['raw_data'] = cleaned_raw_data
+        if old_raw_data:
+            write_csv(old_raw_data, 'raw')


def average_raw_data(tick_time):
    global last_heartrate
@@ -101,8 +132,6 @@ def average_raw_data(tick_time):
            if len(period_data) > 0:
                period_data_average = sum(period_data) / len(period_data)
            else:
-                print("({}) Period data empty: {}".format(data_type,
-                                                          period_seconds))
                if data_type == "heartrate" and period_seconds == min(periods):
                    period_data_average = last_heartrate
                else:
@@ -110,38 +139,19 @@ def average_raw_data(tick_time):
            period_averages_dict[period_seconds] = zero_to_nan(period_data_average)

-            csv_out[data_type + str(period_seconds)] = zero_to_nan(period_data_average)
+            csv_header_field_name = csv_header_name_format.format(data_type, period_seconds)
+            csv_out[csv_header_field_name] = zero_to_nan(period_data_average)

        s_data['averaged_data'].append(period_averages_dict)
-    write_csv(csv_out)
+
+    write_csv([csv_out], 'avg')


def process_gyro_data(gyro_data, tick_time):
-    # Each gyro reading from miband4 comes over as a group of three,
-    # each containing x,y,z values. This function summarizes the
-    # values into a single consolidated movement value.

    sleep_move = sleep_data['movement']
-    sleep_workspace = sleep_move['workspace']
-
-    gyro_last_x = sleep_workspace['gyro_last_x']
-    gyro_last_y = sleep_workspace['gyro_last_y']
-    gyro_last_z = sleep_workspace['gyro_last_z']
    value_name = sleep_move['value_name']
-    gyro_movement = 0
-    for gyro_datum in gyro_data:
-        gyro_delta_x = abs(gyro_datum['x'] - gyro_last_x)
-        gyro_last_x = gyro_datum['x']
-        gyro_delta_y = abs(gyro_datum['y'] - gyro_last_y)
-        gyro_last_y = gyro_datum['y']
-        gyro_delta_z = abs(gyro_datum['z'] - gyro_last_z)
-        gyro_last_z = gyro_datum['z']
-        gyro_delta_sum = gyro_delta_x + gyro_delta_y + gyro_delta_z
-        gyro_movement += gyro_delta_sum
-
-    sleep_workspace['gyro_last_x'] = gyro_last_x
-    sleep_workspace['gyro_last_y'] = gyro_last_y
-    sleep_workspace['gyro_last_z'] = gyro_last_z
+    gyro_movement = average_gyro_data.process(gyro_data)
+    print("Gyro: {}".format(gyro_movement))

    sleep_move['raw_data'].append({
        'time': tick_time,
        value_name: gyro_movement
@@ -157,11 +167,13 @@ def process_heartrate_data(heartrate_data, tick_time):
        value_name: heartrate_data
    } )


def zero_to_nan(value):
    if value == 0:
        return (float('nan'))
    return int(value)


def update_graph_data():
    for data_type in sleep_data:
        s_data = sleep_data[data_type] # Re-referenced to shorten name
@@ -175,7 +187,6 @@ def update_graph_data():
        starting_index = max([(len(g_data['time']) - 1), 0])
        ending_index = len(avg_data) - 1

-        # Re-referenced to shorten name
        sleep_data_range = avg_data[starting_index:ending_index]

        for sleep_datum in sleep_data_range:
@@ -184,6 +195,7 @@ def update_graph_data():
                if g_data['data'][period] != 'nan':
                    g_data['data'][period].append(sleep_datum[period])


def init_graph_data():
    for data_type in sleep_data:
        data_periods = sleep_data[data_type]['periods']
@@ -225,6 +237,11 @@ def graph_animation(i):
    if plotflag:
        plt.legend()


def init_graph():
    ani = animation.FuncAnimation(graph_figure, graph_animation, interval=1000)
    plt.show()
+
+
+if __name__ == 'sleepdata':
+    average_gyro_data = Average_Gyro_Data()
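For reference, a minimal usage sketch of the Average_Gyro_Data class introduced above, assuming the class is in scope (it lives in sleepdata.py, which also creates a module-level average_gyro_data instance when imported). The sample x/y/z readings are invented; real ones arrive from the Mi Band 4 gyro callback as dicts with 'x', 'y' and 'z' keys.

# Invented sample readings for illustration only.
samples = [
    {'x': 10, 'y': -3, 'z': 2},
    {'x': 12, 'y': -1, 'z': 5},
    {'x': 9, 'y': -2, 'z': 4},
]

gyro = Average_Gyro_Data()
print(gyro.process(samples))  # 27: sum of per-axis |deltas|, with the last x/y/z carried between calls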