diff --git a/modules/datapipeline.py b/modules/datapipeline.py
index 819ebf0bca8b3ebccec2e4916b4325f92f9a0fa7..7c17bc4175fa140b4653530f55da9e67ab697668 100644
--- a/modules/datapipeline.py
+++ b/modules/datapipeline.py
@@ -156,7 +156,8 @@ def get_file_list(starts_with:str, sbj=None):
     if sbj is not None:
         f_glob = path_join(DATA_DIR, sbj, '**')
     else:
-        f_glob = path_join(DATA_DIR, 'S*', '**')
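+        # subject recordings live in 'Pilot*' directories under DATA_DIR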
+        f_glob = path_join(DATA_DIR, 'Pilot*', '**')
 
     if starts_with is not None:
         f_glob = path_join(f_glob, f'{starts_with}*')
diff --git a/regress_rr.py b/regress_rr.py
index 282fe46e2e041e72941186544219efc0038c60ee..111a9f99c06c74385051ed0635053f110637face 100644
--- a/regress_rr.py
+++ b/regress_rr.py
@@ -178,15 +178,34 @@ def load_e4_file(e4_file:str):
 
     t0_datetime = datetime.utcfromtimestamp(t0)
     t0_local = datetime_from_utc_to_local(t0_datetime)
-    ipdb.set_trace()
     time = [t0_local.timestamp() + ind*(1/fs) for ind in
             range(nsamples)]
     tmp = [np.nan, np.nan]
     time = tmp + time
+    bvp.rename(columns={0: "bvp"}, inplace=True)
     bvp['sec'] = np.array(time)
 
-    return bvp
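+    # the first two rows of the E4 csv hold the session start time and the
+    # sampling rate rather than samples; split them off into a header dict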
+    head = bvp.iloc[[0, 1]]
+    bvp.drop(inplace=True, index=[0, 1])
+
+    hdr = {'start_time': head.iloc[0,0],
+           'fs': head.iloc[1,0]}
 
+    return bvp, hdr
+
+def load_e4_files(f_list:list):
+    ''' Load every E4 BVP file in f_list, returning the concatenated
+    data frame and the list of per-file headers. '''
+    data = []
+    hdr = []
+    for f in f_list:
+        d, h = load_e4_file(f)
+        data.append(d)
+        hdr.append(h)
+    data_df = pd.concat(data, axis=0)
+    return data_df, hdr
 
 # Synchronising data
 def sync_to_ref(df0, df1):
@@ -356,6 +375,12 @@ def load_and_sync_xsens(subject):
 
     br_list = get_file_list('*Summary*', sbj=subject)
 
+    # load e4 wristband
+    e4_list = get_file_list('*.zip', sbj=subject)
+    bvp_df_all, bvp_hdr = load_e4_files(e4_list)
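+    # all E4 sessions are assumed to share the sampling rate of the first file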
+    bvp_fs = bvp_hdr[0]['fs']
+
     xsens_list = []
     # skip the first and last x minute(s)
     minutes_to_skip = 2
@@ -380,6 +405,8 @@
         # sync
         br_df, imu_df = sync_to_ref(br_df, imu_df_all.copy())
         pss_df, _ = sync_to_ref(pss_df, imu_df_all.copy())
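+        # align the E4 BVP stream to the already-synchronised pressure signal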
+        bvp_df, _ = sync_to_ref(bvp_df_all.copy(), pss_df.copy())
 
         # extract relevant data
         acc_data = np.stack(imu_df['accelerometer'].values)
@@ -398,11 +425,16 @@
         br_data = np.interp(x_time, br_df['sec'].values, br_data)\
                 .reshape(-1, 1)
 
+        bvp_data = bvp_df['bvp'].values
+        bvp_data = np.interp(x_time, bvp_df['sec'].values, bvp_data)\
+                .reshape(-1, 1)
+
         xsens_data = np.concatenate(
-            (x_time, br_data, pss_data, acc_data, gyr_data),
+            (x_time, br_data, pss_data, bvp_data, acc_data, gyr_data),
             axis=1)
 
-        columns=['sec'   , 'BR'    , 'PSS'   ,
+        columns=['sec'   , 'BR'    , 'PSS'   , 'BVP' ,
                  'acc_x' , 'acc_y' , 'acc_z' ,
                  'gyr_x' , 'gyr_y' , 'gyr_z' , ]
         xsens_df_tmp = pd.DataFrame(xsens_data, columns=columns)
@@ -455,6 +487,18 @@ def load_tsfresh(subject, project_dir,
     df_out.to_pickle(pkl_file)
     return df_out
 
+def get_activity_log(subject):
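+    ''' Concatenate the activity label csv logs recorded for a subject. '''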
+    activity_list = get_file_list('activity*.csv', sbj=subject)
+    activity_dfs = [pd.read_csv(f) for f in activity_list]
+    return pd.concat(activity_dfs, axis=0)
+
+def get_respiration_log(subject):
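+    ''' Concatenate the json respiration event logs recorded for a subject. '''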
+    log_list = get_file_list('*.json', sbj=subject)
+    log_dfs = [pd.read_json(f) for f in log_list]
+    return pd.concat(log_dfs, axis=0)
+
 # save evaluation metrics in single file that handles the models for the
 # subject and config
 class EvalHandler():
@@ -711,10 +755,12 @@ if __name__ == '__main__':
     # '[!M]*'
     np.random.seed(100)
     n_subject_max = 2
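+    # development run: load, sync and inspect a single pilot subject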
+    xsens_df = load_and_sync_xsens('Pilot02')
+
+    activity_df = get_activity_log('Pilot02')
+    event_df = get_respiration_log('Pilot02')
 
-    e4_glob = join(DATA_DIR, 'Pilot02', 'e4', '*')
-    e4_file = glob.glob(e4_glob)[0]
-    bvp_df = load_e4_file(e4_file)
     ipdb.set_trace()
     
     args = arg_parser()