Skip to content
Snippets Groups Projects
Commit 40e01903 authored by Raymond Chia's avatar Raymond Chia
Browse files

sync bvp and load events

parent c3e27328
Branches
No related merge requests found
File added
File added
File added
File added
No preview for this file type
...@@ -156,7 +156,7 @@ def get_file_list(starts_with:str, sbj=None): ...@@ -156,7 +156,7 @@ def get_file_list(starts_with:str, sbj=None):
if sbj is not None: if sbj is not None:
f_glob = path_join(DATA_DIR, sbj, '**') f_glob = path_join(DATA_DIR, sbj, '**')
else: else:
f_glob = path_join(DATA_DIR, 'S*', '**') f_glob = path_join(DATA_DIR, 'Pilot*', '**')
if starts_with is not None: if starts_with is not None:
f_glob = path_join(f_glob, f'{starts_with}*') f_glob = path_join(f_glob, f'{starts_with}*')
......
...@@ -178,15 +178,32 @@ def load_e4_file(e4_file:str): ...@@ -178,15 +178,32 @@ def load_e4_file(e4_file:str):
t0_datetime = datetime.utcfromtimestamp(t0) t0_datetime = datetime.utcfromtimestamp(t0)
t0_local = datetime_from_utc_to_local(t0_datetime) t0_local = datetime_from_utc_to_local(t0_datetime)
ipdb.set_trace()
time = [t0_local.timestamp() + ind*(1/fs) for ind in time = [t0_local.timestamp() + ind*(1/fs) for ind in
range(nsamples)] range(nsamples)]
tmp = [np.nan, np.nan] tmp = [np.nan, np.nan]
time = tmp + time time = tmp + time
bvp.rename(columns={0: "bvp"}, inplace=True)
bvp['sec'] = np.array(time) bvp['sec'] = np.array(time)
return bvp head = bvp.iloc[[0, 1]]
bvp.drop(inplace=True, index=[0, 1])
hdr = {'start_time': head.iloc[0,0],
'fs': head.iloc[0,1]}
return bvp, hdr
def load_e4_files(f_list: list):
    """Load several Empatica E4 export files and merge their BVP data.

    Parameters
    ----------
    f_list : list
        File paths accepted by ``load_e4_file`` (E4 zip exports).

    Returns
    -------
    tuple
        ``(data_df, hdr)`` where ``data_df`` is the row-wise
        concatenation of every file's BVP DataFrame and ``hdr`` is the
        list of per-file header dicts (``start_time``, ``fs``) in the
        same order as ``f_list``.

    Raises
    ------
    ValueError
        Propagated from ``pd.concat`` when ``f_list`` is empty.
    """
    data = []
    hdr = []
    # Unpack each (DataFrame, header) pair in one pass instead of
    # buffering the tuples in an intermediate list first.
    for f in f_list:
        d, h = load_e4_file(f)
        data.append(d)
        hdr.append(h)
    data_df = pd.concat(data, axis=0)
    return data_df, hdr
# Synchronising data # Synchronising data
def sync_to_ref(df0, df1): def sync_to_ref(df0, df1):
...@@ -356,6 +373,11 @@ def load_and_sync_xsens(subject): ...@@ -356,6 +373,11 @@ def load_and_sync_xsens(subject):
br_list = get_file_list('*Summary*', sbj=subject) br_list = get_file_list('*Summary*', sbj=subject)
# load e4 wristband
e4_list = get_file_list('*.zip', sbj=subject)
bvp_df_all, bvp_hdr = load_e4_files(e4_list)
bvp_fs = bvp_hdr[0]['fs']
xsens_list = [] xsens_list = []
# skip the first and last x minute(s) # skip the first and last x minute(s)
minutes_to_skip = 2 minutes_to_skip = 2
...@@ -380,6 +402,7 @@ def load_and_sync_xsens(subject): ...@@ -380,6 +402,7 @@ def load_and_sync_xsens(subject):
# sync # sync
br_df, imu_df = sync_to_ref(br_df, imu_df_all.copy()) br_df, imu_df = sync_to_ref(br_df, imu_df_all.copy())
pss_df, _ = sync_to_ref(pss_df, imu_df_all.copy()) pss_df, _ = sync_to_ref(pss_df, imu_df_all.copy())
bvp_df, _ = sync_to_ref(bvp_df_all.copy(), pss_df.copy())
# extract relevant data # extract relevant data
acc_data = np.stack(imu_df['accelerometer'].values) acc_data = np.stack(imu_df['accelerometer'].values)
...@@ -398,11 +421,15 @@ def load_and_sync_xsens(subject): ...@@ -398,11 +421,15 @@ def load_and_sync_xsens(subject):
br_data = np.interp(x_time, br_df['sec'].values, br_data)\ br_data = np.interp(x_time, br_df['sec'].values, br_data)\
.reshape(-1, 1) .reshape(-1, 1)
bvp_data = bvp_df['bvp'].values
bvp_data = np.interp(x_time, bvp_df['sec'].values, bvp_data)\
.reshape(-1, 1)
xsens_data = np.concatenate( xsens_data = np.concatenate(
(x_time, br_data, pss_data, acc_data, gyr_data), (x_time, br_data, pss_data, bvp_data, acc_data, gyr_data),
axis=1) axis=1)
columns=['sec' , 'BR' , 'PSS' , columns=['sec' , 'BR' , 'PSS' , 'BVP' ,
'acc_x' , 'acc_y' , 'acc_z' , 'acc_x' , 'acc_y' , 'acc_z' ,
'gyr_x' , 'gyr_y' , 'gyr_z' , ] 'gyr_x' , 'gyr_y' , 'gyr_z' , ]
xsens_df_tmp = pd.DataFrame(xsens_data, columns=columns) xsens_df_tmp = pd.DataFrame(xsens_data, columns=columns)
...@@ -455,6 +482,16 @@ def load_tsfresh(subject, project_dir, ...@@ -455,6 +482,16 @@ def load_tsfresh(subject, project_dir,
df_out.to_pickle(pkl_file) df_out.to_pickle(pkl_file)
return df_out return df_out
def get_activity_log(subject):
    """Collect every ``activity*.csv`` log for *subject* as one DataFrame.

    Parameters
    ----------
    subject : str
        Subject identifier forwarded to ``get_file_list``.

    Returns
    -------
    pd.DataFrame
        Row-wise concatenation of all matching activity CSV files.
        Raises ``ValueError`` (via ``pd.concat``) if none are found.
    """
    frames = []
    for fname in get_file_list('activity*.csv', sbj=subject):
        frames.append(pd.read_csv(fname))
    return pd.concat(frames, axis=0)
def get_respiration_log(subject):
    """Collect every ``*.json`` event log for *subject* as one DataFrame.

    Parameters
    ----------
    subject : str
        Subject identifier forwarded to ``get_file_list``.

    Returns
    -------
    pd.DataFrame
        Row-wise concatenation of all matching JSON log files.
        Raises ``ValueError`` (via ``pd.concat``) if none are found.
    """
    frames = []
    for fname in get_file_list('*.json', sbj=subject):
        frames.append(pd.read_json(fname))
    return pd.concat(frames, axis=0)
# save evaluation metrics in single file that handles the models for the # save evaluation metrics in single file that handles the models for the
# subject and config # subject and config
class EvalHandler(): class EvalHandler():
...@@ -711,10 +748,11 @@ if __name__ == '__main__': ...@@ -711,10 +748,11 @@ if __name__ == '__main__':
# '[!M]*' # '[!M]*'
np.random.seed(100) np.random.seed(100)
n_subject_max = 2 n_subject_max = 2
xsens_df = load_and_sync_xsens('Pilot02')
activity_df = get_activity_log('Pilot02')
event_df = get_respiration_log('Pilot02')
e4_glob = join(DATA_DIR, 'Pilot02', 'e4', '*')
e4_file = glob.glob(e4_glob)[0]
bvp_df = load_e4_file(e4_file)
ipdb.set_trace() ipdb.set_trace()
args = arg_parser() args = arg_parser()
......
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment