Fix a bug when fitbit data is empty

pull/128/head
JulioV 2021-03-11 14:51:16 -05:00
parent 2ee45995f2
commit 13174b0c2a
8 changed files with 17 additions and 10 deletions
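
All six Fitbit parser changes below apply the same guard: a `local_date_time` (or sleep start/end) column is only run through `.dt.strftime` when it is actually a datetime64 column. When a participant has no Fitbit data, the parser presumably returns an empty DataFrame whose date column keeps the default `object` dtype, so the `.dt` accessor raises an `AttributeError`. The sketch below is illustrative only (the `device_id`/`heartrate` column names and sample values are made up, not taken from the parsers), assuming an empty pull yields an empty DataFrame with the expected columns:

```python
import pandas as pd

# Empty pull: no rows, so local_date_time has dtype "object", not datetime64,
# and the .dt accessor raises. (device_id/heartrate are hypothetical columns.)
empty = pd.DataFrame(columns=["device_id", "heartrate", "local_date_time"])
try:
    empty["local_date_time"].dt.strftime("%Y-%m-%d %H:%M:%S")
except AttributeError as error:
    print("empty data:", error)  # "Can only use .dt accessor with datetimelike values"

# The guard added in this commit: format the column only when it really is a
# datetime column; an empty, object-typed column is passed through untouched.
if pd.api.types.is_datetime64_any_dtype(empty["local_date_time"]):
    empty["local_date_time"] = empty["local_date_time"].dt.strftime("%Y-%m-%d %H:%M:%S")

# With real data the column is datetime64, so the conversion still happens.
data = pd.DataFrame({"local_date_time": pd.to_datetime(["2021-03-11 14:51:16"])})
if pd.api.types.is_datetime64_any_dtype(data["local_date_time"]):
    data["local_date_time"] = data["local_date_time"].dt.strftime("%Y-%m-%d %H:%M:%S")
print(data)  # local_date_time is now the string "2021-03-11 14:51:16"
```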

View File

@@ -13,11 +13,11 @@ PIDS: [test01]
 CREATE_PARTICIPANT_FILES:
   CSV_FILE_PATH: "data/external/example_participants.csv" # see docs for required format
   PHONE_SECTION:
-    ADD: TRUE
+    ADD: True
     DEVICE_ID_COLUMN: device_id # column name
     IGNORED_DEVICE_IDS: []
   FITBIT_SECTION:
-    ADD: FALSE
+    ADD: True
     DEVICE_ID_COLUMN: fitbit_id # column name
     IGNORED_DEVICE_IDS: []
   EMPATICA_SECTION:

View File

@@ -3,7 +3,7 @@ This [data stream](../../datastreams/data-streams-introduction) handles Empatica
 ## Container
-You need to create a subfolder for every participant named after their `pid` in the path specified by `[EMPATICA_DATA_STREAMS][empatica_zipfiles][FOLDER]`. You can add one or more Empatica zip files to any subfolder.
+You need to create a subfolder for every participant named after their `device id` inside the folder specified by `[EMPATICA_DATA_STREAMS][empatica_zipfiles][FOLDER]`. You can add one or more Empatica zip files to any subfolder.
 The script to connect and download data from this container is at:
 ```bash

View File

@@ -79,5 +79,6 @@ def parseHeartrateData(heartrate_data):
 def main(json_raw, stream_parameters):
     parsed_data = parseHeartrateData(json_raw)
     parsed_data["timestamp"] = 0 # this column is added at readable_datetime.R because we neeed to take into account multiple timezones
-    parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_date_time']):
+        parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
     return(parsed_data)

View File

@@ -68,5 +68,6 @@ def parseHeartrateData(heartrate_data):
 def main(json_raw, stream_parameters):
     parsed_data = parseHeartrateData(json_raw)
     parsed_data["timestamp"] = 0 # this column is added at readable_datetime.R because we neeed to take into account multiple timezones
-    parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_date_time']):
+        parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
     return(parsed_data)

View File

@@ -137,6 +137,7 @@ def parseSleepData(sleep_data):
 def main(json_raw, stream_parameters):
     parsed_data = parseSleepData(json_raw)
     parsed_data["timestamp"] = 0 # this column is added at readable_datetime.R because we neeed to take into account multiple timezones
-    parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_date_time']):
+        parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
     return(parsed_data)

View File

@@ -59,7 +59,9 @@ def parseSleepData(sleep_data):
 def main(json_raw, stream_parameters):
     parsed_data = parseSleepData(json_raw)
     parsed_data["timestamp"] = 0 # this column is added at readable_datetime.R because we neeed to take into account multiple timezones
-    parsed_data['local_start_date_time'] = parsed_data['local_start_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
-    parsed_data['local_end_date_time'] = parsed_data['local_end_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_start_date_time']):
+        parsed_data['local_start_date_time'] = parsed_data['local_start_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_end_date_time']):
+        parsed_data['local_end_date_time'] = parsed_data['local_end_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
     if stream_parameters["SLEEP_SUMMARY_EPISODE_DAY_ANCHOR"] == "start":

View File

@@ -40,5 +40,6 @@ def parseStepsData(steps_data):
 def main(json_raw, stream_parameters):
     parsed_data = parseStepsData(json_raw)
     parsed_data["timestamp"] = 0 # this column is added at readable_datetime.R because we neeed to take into account multiple timezones
-    parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_date_time']):
+        parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
     return(parsed_data)

View File

@@ -33,5 +33,6 @@ def parseStepsData(steps_data):
 def main(json_raw, stream_parameters):
     parsed_data = parseStepsData(json_raw)
     parsed_data["timestamp"] = 0 # this column is added at readable_datetime.R because we neeed to take into account multiple timezones
-    parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
+    if pd.api.types.is_datetime64_any_dtype( parsed_data['local_date_time']):
+        parsed_data['local_date_time'] = parsed_data['local_date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
     return(parsed_data)