# rapids/config.yaml

# Valid database table names
SENSORS: [applications_crashes, applications_foreground, applications_notifications, battery, bluetooth, calls, locations, messages, plugin_ambient_noise, plugin_device_usage, plugin_google_activity_recognition, plugin_ios_activity_recognition, screen]
FITBIT_TABLE: [fitbit_data]
FITBIT_SENSORS: [heartrate, steps, sleep, calories]
FITBIT_DATA_TYPE: [summary, intraday]
# Participants to include in the analysis
# You must create a file for each participant
# named pXXX containing their device_id
PIDS: [p01, p02]
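# A minimal sketch of a participant file as described above: one file per participant,
# named after its PID and containing that participant's device_id. The path and the
# device_id below are made-up examples, not values from this project:
#   data/external/p01:
#     a1b2c3d4-0000-0000-0000-000000000000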
# Global var with common day segments
DAY_SEGMENTS: &day_segments
[daily, morning, afternoon, evening, night]
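# &day_segments defines a reusable YAML anchor; the sensor sections below refer back to it
# with the alias *day_segments, for example:
#   DAY_SEGMENTS: *day_segments   # expands to [daily, morning, afternoon, evening, night]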
# Global timezone
# Use codes from https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
# Double-check your code; for example, EST is not US Eastern Time.
TIMEZONE: &timezone
America/New_York
DATABASE_GROUP: &database_group
MY_GROUP
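# MY_GROUP is expected to name a database credentials group kept outside this file
# (typically a .env file in the project root). A minimal sketch; every field value below
# is an assumption you must replace with your own connection details:
#   [MY_GROUP]
#   user=rapids
#   password=change_me
#   host=127.0.0.1
#   port=3306
#   database=aware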
DOWNLOAD_PARTICIPANTS:
IGNORED_DEVICE_IDS: [] # for example "5a1dd68c-6cd1-48fe-ae1e-14344ac5215f"
GROUP: *database_group
# Download data config
DOWNLOAD_DATASET:
GROUP: *database_group
# Readable datetime config
READABLE_DATETIME:
FIXED_TIMEZONE: *timezone
# Communication SMS features config; the TYPES and FEATURES keys need to match
SMS:
TYPES : [received, sent]
FEATURES:
received: [count, distinctcontacts, timefirstsms, timelastsms, countmostfrequentcontact]
sent: [count, distinctcontacts, timefirstsms, timelastsms, countmostfrequentcontact]
DAY_SEGMENTS: *day_segments
# Communication call features config; the TYPES and FEATURES keys need to match
CALLS:
TYPES: [missed, incoming, outgoing]
FEATURES:
missed: [count, distinctcontacts, timefirstcall, timelastcall, countmostfrequentcontact]
incoming: [count, distinctcontacts, meanduration, sumduration, minduration, maxduration, stdduration, modeduration, entropyduration, timefirstcall, timelastcall, countmostfrequentcontact]
outgoing: [count, distinctcontacts, meanduration, sumduration, minduration, maxduration, stdduration, modeduration, entropyduration, timefirstcall, timelastcall, countmostfrequentcontact]
DAY_SEGMENTS: *day_segments
APPLICATION_GENRES:
CATALOGUE_SOURCE: FILE # FILE (genres are read from CATALOGUE_FILE) or GOOGLE (genres are scraped from the Play Store)
CATALOGUE_FILE: "data/external/stachl_application_genre_catalogue.csv"
UPDATE_CATALOGUE_FILE: false # if CATALOGUE_SOURCE is equal to FILE, whether or not to update CATALOGUE_FILE; if CATALOGUE_SOURCE is equal to GOOGLE, all scraped genres will be saved to CATALOGUE_FILE
SCRAPE_MISSING_GENRES: false # whether or not to scrape missing genres; only effective if CATALOGUE_SOURCE is equal to FILE. If CATALOGUE_SOURCE is equal to GOOGLE, all genres are scraped anyway
PHONE_VALID_SENSED_DAYS:
BIN_SIZE: 5 # (in minutes)
MIN_VALID_HOURS: 20 # (out of 24)
MIN_BINS_PER_HOUR: 8 # (out of 60min/BIN_SIZE bins)
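# Rough arithmetic behind these defaults: with BIN_SIZE = 5 there are 60/5 = 12 bins per hour;
# an hour presumably counts as sensed when at least MIN_BINS_PER_HOUR = 8 of those 12 bins
# contain any data, and a day is kept when at least MIN_VALID_HOURS = 20 of its 24 hours are
# sensed (see rule phone_valid_sensed_days in rules/preprocessing.snakefile).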
RESAMPLE_FUSED_LOCATION:
CONSECUTIVE_THRESHOLD: 30 # minutes, only replicate location samples to the next sensed bin if the phone did not stop collecting data for more than this threshold
TIME_SINCE_VALID_LOCATION: 720 # minutes, only replicate location samples to consecutive sensed bins if they were logged within this threshold after a valid location row
TIMEZONE: *timezone
BARNETT_LOCATION:
DAY_SEGMENTS: [daily] # These features are only available on a daily basis
FEATURES: ["hometime","disttravelled","rog","maxdiam","maxhomedist","siglocsvisited","avgflightlen","stdflightlen","avgflightdur","stdflightdur","probpause","siglocentropy","circdnrtn","wkenddayrtn"]
LOCATIONS_TO_USE: ALL # ALL, ALL_EXCEPT_FUSED OR RESAMPLE_FUSED
ACCURACY_LIMIT: 51 # meters, drops location coordinates with an accuracy higher than this. This number means there's a 68% probability the true location is within this radius
TIMEZONE: *timezone
MINUTES_DATA_USED: False # Use this for quality control purposes: how many minutes of data (location coordinates grouped by minute) were used to compute features
BLUETOOTH:
DAY_SEGMENTS: *day_segments
FEATURES: ["countscans", "uniquedevices", "countscansmostuniquedevice"]
ACTIVITY_RECOGNITION:
DAY_SEGMENTS: *day_segments
FEATURES: ["count","mostcommonactivity","countuniqueactivities","activitychangecount","sumstationary","summobile","sumvehicle"]
BATTERY:
DAY_SEGMENTS: *day_segments
FEATURES: ["countdischarge", "sumdurationdischarge", "countcharge", "sumdurationcharge", "avgconsumptionrate", "maxconsumptionrate"]
SCREEN:
DAY_SEGMENTS: *day_segments
REFERENCE_HOUR_FIRST_USE: 0
FEATURES_DELTAS: ["countepisode", "episodepersensedminutes", "sumduration", "maxduration", "minduration", "avgduration", "stdduration", "firstuseafter"]
EPISODE_TYPES: ["unlock"]
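# A hedged reading of REFERENCE_HOUR_FIRST_USE: 0: the firstuseafter feature is presumably
# measured from hour 0 (midnight), i.e. the time elapsed until the first unlock episode of the
# day segment. This is an interpretation, not confirmed in this file.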
LIGHT:
DAY_SEGMENTS: *day_segments
FEATURES: ["count", "maxlux", "minlux", "avglux", "medianlux", "stdlux"]
ACCELEROMETER:
DAY_SEGMENTS: *day_segments
FEATURES: ["maxmagnitude", "minmagnitude", "avgmagnitude", "medianmagnitude", "stdmagnitude", "ratioexertionalactivityepisodes", "sumexertionalactivityepisodes", "longestexertionalactivityepisode", "longestnonexertionalactivityepisode", "countexertionalactivityepisodes", "countnonexertionalactivityepisodes"]
APPLICATIONS_FOREGROUND:
DAY_SEGMENTS: *day_segments
SINGLE_CATEGORIES: ["all", "video"]
MULTIPLE_CATEGORIES:
social: ["socialnetworks", "socialmediatools"]
entertainment: ["entertainment", "gamingknowledge", "gamingcasual", "gamingadventure", "gamingstrategy", "gamingtoolscommunity", "gamingroleplaying", "gamingaction", "gaminglogic", "gamingsports", "gamingsimulation"]
SINGLE_APPS: ["top1global", "com.facebook.moments", "com.google.android.youtube", "com.twitter.android"] # There's no entropy for single apps
EXCLUDED_CATEGORIES: ["system_apps", "video"]
EXCLUDED_APPS: ["com.fitbit.FitbitMobile", "com.aware.plugin.upmc.cancer"]
FEATURES: ["count", "timeoffirstuse", "timeoflastuse", "frequencyentropy"]
HEARTRATE:
DAY_SEGMENTS: *day_segments
FEATURES: ["maxhr", "minhr", "avghr", "medianhr", "modehr", "stdhr", "diffmaxmodehr", "diffminmodehr", "entropyhr", "lengthoutofrange", "lengthfatburn", "lengthcardio", "lengthpeak"]
DAILY_FEATURES_FROM_SUMMARY_DATA: ["restinghr"] # calories related features might be inaccurate: ["caloriesoutofrange", "caloriesfatburn", "caloriescardio", "caloriespeak"]
STEP:
DAY_SEGMENTS: *day_segments
FEATURES:
ALL_STEPS: ["sumallsteps", "maxallsteps", "minallsteps", "avgallsteps", "stdallsteps"]
SEDENTARY_BOUT: ["countsedentarybout", "maxdurationsedentarybout", "mindurationsedentarybout", "avgdurationsedentarybout", "stddurationsedentarybout", "sumdurationsedentarybout"]
ACTIVE_BOUT: ["countactivebout", "maxdurationactivebout", "mindurationactivebout", "avgdurationactivebout", "stddurationactivebout"]
THRESHOLD_ACTIVE_BOUT: 10 # steps
INCLUDE_ZERO_STEP_ROWS: True
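# A hedged reading of these parameters: THRESHOLD_ACTIVE_BOUT presumably labels each minute as
# sedentary when its step count is below 10 and active otherwise, before the *bout features
# above are computed; INCLUDE_ZERO_STEP_ROWS would control whether minutes with zero steps are
# kept in that computation.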
SLEEP:
DAY_SEGMENTS: *day_segments
SLEEP_TYPES: ["main", "nap", "all"]
DAILY_FEATURES_FROM_SUMMARY_DATA: ["sumdurationafterwakeup", "sumdurationasleep", "sumdurationawake", "sumdurationtofallasleep", "sumdurationinbed", "avgefficiency", "countepisode"]
WIFI:
DAY_SEGMENTS: *day_segments
FEATURES: ["countscans", "uniquedevices", "countscansmostuniquedevice"]
PARAMS_FOR_ANALYSIS:
GROUNDTRUTH_TABLE: participant_info
SOURCES: &sources ["phone_features", "fitbit_features", "phone_fitbit_features"]
DAY_SEGMENTS: *day_segments
PHONE_FEATURES: [accelerometer, applications_foreground, battery, call_incoming, call_missed, call_outgoing, activity_recognition, light, location_barnett, screen, sms_received, sms_sent]
FITBIT_FEATURES: [fitbit_heartrate, fitbit_step, fitbit_sleep]
PHONE_FITBIT_FEATURES: "" # This array is merged in the input_merge_features_of_single_participant function in models.snakefile
DEMOGRAPHIC_FEATURES: [age, gender, inpatientdays]
CATEGORICAL_DEMOGRAPHIC_FEATURES: ["gender"]
# Whether or not to include only days with enough valid sensed hours;
# the logic can be found in rule phone_valid_sensed_days of rules/preprocessing.snakefile
DROP_VALID_SENSED_DAYS:
ENABLED: True
# Whether or not to include certain days in the analysis, logic can be found in rule days_to_analyse of rules/mystudy.snakefile
# If you want to include all days downloaded for each participant, set ENABLED to False
DAYS_TO_ANALYSE:
ENABLED: True
DAYS_BEFORE_SURGERY: 15
DAYS_IN_HOSPITAL: F # T or F
DAYS_AFTER_DISCHARGE: 7
# Cleaning Parameters
COLS_NAN_THRESHOLD: 0.5
COLS_VAR_THRESHOLD: True
ROWS_NAN_THRESHOLD: 0.5
PARTICIPANT_DAYS_BEFORE_THRESHOLD: 7
PARTICIPANT_DAYS_AFTER_THRESHOLD: 4
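# A hedged reading of the cleaning thresholds above: feature columns with more than 50% missing
# values (COLS_NAN_THRESHOLD) and rows with more than 50% missing features (ROWS_NAN_THRESHOLD)
# are presumably dropped; COLS_VAR_THRESHOLD: True would additionally drop zero-variance columns,
# and participants are presumably kept only if they contribute at least 7 days before and 4 days
# after the study events configured in DAYS_TO_ANALYSE.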
# Extract summarised features from daily features whose names contain any of the following substrings
NUMERICAL_OPERATORS: ["count", "sum", "length", "avg", "restinghr"]
CATEGORICAL_OPERATORS: ["mostcommon"]
MODEL_NAMES: ["LogReg", "kNN", "SVM", "DT", "RF", "GB", "XGBoost", "LightGBM"]
CV_METHODS: ["LeaveOneOut"]
SUMMARISED: ["summarised"] # "summarised" or "notsummarised"
RESULT_COMPONENTS: ["fold_predictions", "fold_metrics", "overall_results", "fold_feature_importances"]
MODEL_SCALER:
LogReg: ["notnormalized", "minmaxscaler", "standardscaler", "robustscaler"]
kNN: ["minmaxscaler", "standardscaler", "robustscaler"]
SVM: ["minmaxscaler", "standardscaler", "robustscaler"]
DT: ["notnormalized"]
RF: ["notnormalized"]
GB: ["notnormalized"]
XGBoost: ["notnormalized"]
LightGBM: ["notnormalized"]
MODEL_HYPERPARAMS:
LogReg:
{"clf__C": [0.01, 0.1, 1, 10, 100], "clf__solver": ["newton-cg", "lbfgs", "liblinear", "saga"], "clf__penalty": ["l2"]}
kNN:
{"clf__n_neighbors": [1, 3, 5], "clf__weights": ["uniform", "distance"], "clf__metric": ["euclidean", "manhattan", "minkowski"]}
SVM:
{"clf__C": [0.01, 0.1, 1, 10, 100], "clf__gamma": ["scale", "auto"], "clf__kernel": ["rbf", "poly", "sigmoid"]}
DT:
{"clf__criterion": ["gini", "entropy"], "clf__max_depth": [null, 3, 5, 7, 9], "clf__max_features": [null, "auto", "sqrt", "log2"]}
RF:
{"clf__n_estimators": [2, 5, 10, 100],"clf__max_depth": [null, 3, 5, 7, 9]}
GB:
{"clf__learning_rate": [0.01, 0.1, 1], "clf__n_estimators": [5, 10, 100, 200], "clf__subsample": [0.5, 0.7, 1.0], "clf__max_depth": [3, 5, 7, 9]}
XGBoost:
{"clf__learning_rate": [0.01, 0.1, 1], "clf__n_estimators": [5, 10, 100, 200], "clf__num_leaves": [5, 16, 31, 62]}
LightGBM:
{"clf__learning_rate": [0.01, 0.1, 1], "clf__n_estimators": [5, 10, 100, 200], "clf__num_leaves": [5, 16, 31, 62]}
# Target Settings:
# 1 => TARGETS_RATIO_THRESHOLD (ceiling) or more of available CESD scores were TARGETS_VALUE_THRESHOLD or higher; 0 => otherwise
TARGETS_RATIO_THRESHOLD: 0.5
TARGETS_VALUE_THRESHOLD: 16
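# Worked example of the rule above: with 4 available CESD scores, TARGETS_RATIO_THRESHOLD = 0.5
# and TARGETS_VALUE_THRESHOLD = 16, the target is 1 when ceiling(0.5 * 4) = 2 or more of those
# scores are >= 16, and 0 otherwise.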