Patching IBI with BVP - selecting appropriate pipeline entry point. WIP
parent 2a8f58f5c8
commit bb62497ba6
@@ -359,8 +359,6 @@ for provider in config["EMPATICA_INTER_BEAT_INTERVAL"]["PROVIDERS"].keys():
     if config["EMPATICA_INTER_BEAT_INTERVAL"]["PROVIDERS"][provider]["COMPUTE"]:
         files_to_compute.extend(expand("data/raw/{pid}/empatica_inter_beat_interval_raw.csv", pid=config["PIDS"]))
         files_to_compute.extend(expand("data/raw/{pid}/empatica_inter_beat_interval_with_datetime.csv", pid=config["PIDS"]))
-        files_to_compute.extend(expand("data/raw/{pid}/empatica_blood_volume_pulse_raw.csv", pid=config["PIDS"]))
-        files_to_compute.extend(expand("data/raw/{pid}/empatica_blood_volume_pulse_with_datetime.csv", pid=config["PIDS"]))
         files_to_compute.extend(expand("data/interim/{pid}/empatica_inter_beat_interval_features/empatica_inter_beat_interval_{language}_{provider_key}.csv", pid=config["PIDS"], language=get_script_language(config["EMPATICA_INTER_BEAT_INTERVAL"]["PROVIDERS"][provider]["SRC_SCRIPT"]), provider_key=provider.lower()))
         files_to_compute.extend(expand("data/processed/features/{pid}/empatica_inter_beat_interval.csv", pid=config["PIDS"]))
         files_to_compute.extend(expand("data/processed/features/{pid}/all_sensor_features.csv", pid=config["PIDS"]))
@@ -3,7 +3,7 @@
 ########################################################################################################################

 # See https://www.rapids.science/latest/setup/configuration/#participant-files
-PIDS: [p01] #p02, p03]
+PIDS: [p01] #p01, p02, p03]

 # See https://www.rapids.science/latest/setup/configuration/#automatic-creation-of-participant-files
 CREATE_PARTICIPANT_FILES:
@@ -43,12 +43,15 @@ TIMEZONE:

 # See https://www.rapids.science/latest/setup/configuration/#data-stream-configuration
 PHONE_DATA_STREAMS:
-  USE: aware_csv
+  USE: aware_postgresql

   # AVAILABLE:
   aware_mysql:
     DATABASE_GROUP: MY_GROUP

+  aware_postgresql:
+    DATABASE_GROUP: PSQL_STRAW
+
   aware_csv:
     FOLDER: data/external/aware_csv

@@ -177,7 +180,7 @@ PHONE_BLUETOOTH:

 # See https://www.rapids.science/latest/features/phone-calls/
 PHONE_CALLS:
-  CONTAINER: calls.csv
+  CONTAINER: call
   PROVIDERS:
     RAPIDS:
       COMPUTE: False
@@ -8,6 +8,8 @@ import csv
 from collections import OrderedDict
 from io import BytesIO, StringIO

+from cr_features.hrv import get_HRV_features
+
 def processAcceleration(x, y, z):
     x = float(x)
     y = float(y)
@@ -94,6 +96,14 @@ def pull_data(data_configuration, device, sensor, container, columns_to_download
         print("Extracting {} data from {} for {}".format(sensor, zipfile, device))
         with ZipFile(zipfile, 'r') as zipFile:
             listOfFileNames = zipFile.namelist()
+            if sensor == "EMPATICA_INTER_BEAT_INTERVAL":
+                extracted_bvp_data = extract_empatica_data(zipFile.read('BVP.csv'), "EMPATICA_BLOOD_VOLUME_PULSE")
+                hrv_time_and_freq_features, sample, bvp_rr, bvp_timings, peak_indx = \
+                    get_HRV_features(extracted_bvp_data['blood_volume_pulse'].to_numpy(), ma=False, detrend=False, m_deternd=False,
+                                     low_pass=False, winsorize=True, winsorize_value=25,
+                                     hampel_fiter=False, median_filter=False, mod_z_score_filter=True,
+                                     sampling=64, feature_names=['meanHr'])
+                print(bvp_rr, bvp_timings)
             for fileName in listOfFileNames:
                 if fileName == sensor_csv:
                     participant_data = pd.concat([participant_data, extract_empatica_data(zipFile.read(fileName), sensor)], axis=0)
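The new branch stops at printing bvp_rr and bvp_timings, so the actual patching of the inter-beat-interval stream is still to come. Below is a rough sketch of that step under stated assumptions: that bvp_timings are beat times in seconds from the start of the BVP recording, that bvp_rr holds the matching RR intervals, and that the IBI frame exposes plain 'timestamp' (milliseconds) and 'inter_beat_interval' columns. The helper name patch_ibi_with_bvp, the bvp_start_timestamp argument, and the gap tolerance are hypothetical and not part of this commit.

import numpy as np
import pandas as pd

def patch_ibi_with_bvp(ibi_data, bvp_rr, bvp_timings, bvp_start_timestamp, tolerance_ms=2000):
    # Hypothetical helper: build IBI-shaped rows from the BVP-derived RR intervals
    # returned by get_HRV_features (timings assumed in seconds from recording start).
    bvp_derived = pd.DataFrame({
        "timestamp": (bvp_start_timestamp + np.asarray(bvp_timings, dtype=float) * 1000).astype("int64"),
        "inter_beat_interval": np.asarray(bvp_rr, dtype=float),
    })
    if ibi_data.empty:
        return bvp_derived
    # Use a derived beat only when no native IBI sample lies within the tolerance,
    # i.e. let BVP fill gaps in the IBI stream without overwriting recorded beats.
    native_ts = np.sort(ibi_data["timestamp"].to_numpy())
    derived_ts = bvp_derived["timestamp"].to_numpy()
    idx = np.searchsorted(native_ts, derived_ts)
    left = np.abs(derived_ts - native_ts[np.clip(idx - 1, 0, len(native_ts) - 1)])
    right = np.abs(native_ts[np.clip(idx, 0, len(native_ts) - 1)] - derived_ts)
    in_gap = np.minimum(left, right) > tolerance_ms
    patched = pd.concat([ibi_data, bvp_derived[in_gap]], axis=0)
    return patched.sort_values("timestamp").reset_index(drop=True)

Whether this merge should happen here in pull_data or later, after the datetime/feature stages, is exactly the open question the WIP title ("selecting appropriate pipeline entry point") refers to.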