[WIP] Methods to get the labels and data plus aggregate them.
parent 622477f19f
commit b06ec6e1ae
@@ -156,9 +156,20 @@ lin_reg_proximity.score(
 from machine_learning import pipeline
 
 # %%
-ml_pipeline = pipeline.MachineLearningPipeline()
+ml_pipeline = pipeline.MachineLearningPipeline(
+    labels_questionnaire="PANAS", data_types="proximity"
+)
 
 # %%
-ml_pipeline.participants_usernames
+ml_pipeline.get_labels()
+
+# %% tags=[]
+ml_pipeline.get_sensor_data()
+
+# %%
+ml_pipeline.aggregate_daily()
+
+# %%
+ml_pipeline.df_full_data_daily_means
 
 # %%
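Note on the notebook cells above: data_types is passed as a plain string, and the pipeline (below) checks it with `in`, which on a string is a substring test ("proximity" in "proximity" holds). Passing a list, e.g. data_types=["proximity"], would make the membership check explicit once more sensor types exist. A minimal illustration (plain Python, names only for illustration):

data_types = "proximity"
assert "proximity" in data_types     # substring test on a str
assert "proximity" in ["proximity"]  # element test on a list
# A list avoids accidental substring matches when more data types are added.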
@@ -0,0 +1,7 @@
+QUESTIONNAIRE_IDS = {"PANAS": {"PA": 8.0, "NA": 9.0}}
+
+QUESTIONNAIRE_IDS_RENAME = {}
+
+for questionnaire in QUESTIONNAIRE_IDS.items():
+    for k, v in questionnaire[1].items():
+        QUESTIONNAIRE_IDS_RENAME[v] = k
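This new module-level mapping (presumably machine_learning/__init__.py, given the import in the pipeline module below) pairs each PANAS subscale with its numeric questionnaire ID, then inverts it so pivoted ID columns can be renamed back to subscale names. A standalone check of the inversion, with an equivalent dict comprehension:

QUESTIONNAIRE_IDS = {"PANAS": {"PA": 8.0, "NA": 9.0}}

# Inverts subscale -> id into id -> subscale; same result as the loop above.
rename_map = {
    v: k for subscales in QUESTIONNAIRE_IDS.values() for k, v in subscales.items()
}
assert rename_map == {8.0: "PA", 9.0: "NA"}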
@@ -1,12 +1,79 @@
 import datetime
 
+import pandas as pd
+
 import participants.query_db
+from features import esm, helper, proximity
+from machine_learning import QUESTIONNAIRE_IDS, QUESTIONNAIRE_IDS_RENAME
 
 
 class MachineLearningPipeline:
-    def __init__(self, participants_usernames=None):
+    def __init__(self, labels_questionnaire, data_types, participants_usernames=None):
         if participants_usernames is None:
             participants_usernames = participants.query_db.get_usernames(
                 collection_start=datetime.date.fromisoformat("2020-08-01")
             )
         self.participants_usernames = participants_usernames
+        self.labels_questionnaire = labels_questionnaire
+        self.data_types = data_types
+
+        self.df_esm = pd.DataFrame()
+        self.df_esm_preprocessed = pd.DataFrame()
+        self.df_esm_interest = pd.DataFrame()
+        self.df_esm_clean = pd.DataFrame()
+
+        self.df_proximity = pd.DataFrame()
+
+        self.df_full_data_daily_means = pd.DataFrame()
+        self.df_esm_daily_means = pd.DataFrame()
+        self.df_proximity_daily_counts = pd.DataFrame()
+
+    def get_labels(self):
+        self.df_esm = esm.get_esm_data(self.participants_usernames)
+        self.df_esm_preprocessed = esm.preprocess_esm(self.df_esm)
+        if self.labels_questionnaire == "PANAS":
+            self.df_esm_interest = self.df_esm_preprocessed[
+                (
+                    self.df_esm_preprocessed["questionnaire_id"]
+                    == QUESTIONNAIRE_IDS.get("PANAS").get("PA")
+                )
+                | (
+                    self.df_esm_preprocessed["questionnaire_id"]
+                    == QUESTIONNAIRE_IDS.get("PANAS").get("NA")
+                )
+            ]
+        self.df_esm_clean = esm.clean_up_esm(self.df_esm_interest)
+
+    def get_sensor_data(self):
+        if "proximity" in self.data_types:
+            self.df_proximity = proximity.get_proximity_data(
+                self.participants_usernames
+            )
+            self.df_proximity = helper.get_date_from_timestamp(self.df_proximity)
+            self.df_proximity = proximity.recode_proximity(self.df_proximity)
+
+    def aggregate_daily(self):
+        self.df_esm_daily_means = (
+            self.df_esm_clean.groupby(["participant_id", "date_lj", "questionnaire_id"])
+            .esm_user_answer_numeric.agg("mean")
+            .reset_index()
+            .rename(columns={"esm_user_answer_numeric": "esm_numeric_mean"})
+        )
+        self.df_esm_daily_means = (
+            self.df_esm_daily_means.pivot(
+                index=["participant_id", "date_lj"],
+                columns="questionnaire_id",
+                values="esm_numeric_mean",
+            )
+            .reset_index(col_level=1)
+            .rename(columns=QUESTIONNAIRE_IDS_RENAME)
+            .set_index(["participant_id", "date_lj"])
+        )
+        self.df_full_data_daily_means = self.df_esm_daily_means.copy()
+        if "proximity" in self.data_types:
+            self.df_proximity_daily_counts = proximity.count_proximity(
+                self.df_proximity, ["participant_id", "date_lj"]
+            )
+            self.df_full_data_daily_means = self.df_full_data_daily_means.join(
+                self.df_proximity_daily_counts
+            )
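aggregate_daily() is the heart of the commit: ESM answers are averaged per (participant_id, date_lj, questionnaire_id), pivoted so each questionnaire ID becomes a column, renamed to subscale names via QUESTIONNAIRE_IDS_RENAME, and joined with per-day proximity counts on the shared (participant_id, date_lj) index. A minimal, self-contained sketch of that flow with made-up values (toy frames only, not real study data; the count_proximity output format is assumed, and the pipeline's reset_index/set_index round-trip around the rename is skipped for brevity):

import pandas as pd

# Toy stand-in for df_esm_clean: two PA and two NA answers on one day.
df_esm_clean = pd.DataFrame(
    {
        "participant_id": [1, 1, 1, 1],
        "date_lj": ["2020-08-01"] * 4,
        "questionnaire_id": [8.0, 8.0, 9.0, 9.0],
        "esm_user_answer_numeric": [4, 2, 1, 3],
    }
)

# Daily mean per participant, day, and questionnaire.
df_daily = (
    df_esm_clean.groupby(["participant_id", "date_lj", "questionnaire_id"])
    .esm_user_answer_numeric.agg("mean")
    .reset_index()
    .rename(columns={"esm_user_answer_numeric": "esm_numeric_mean"})
)

# Pivot questionnaire IDs into columns and rename them to subscale names.
df_daily = df_daily.pivot(
    index=["participant_id", "date_lj"],
    columns="questionnaire_id",
    values="esm_numeric_mean",
).rename(columns={8.0: "PA", 9.0: "NA"})

# Hypothetical per-day proximity counts, joined on the shared index.
df_prox = pd.DataFrame(
    {"participant_id": [1], "date_lj": ["2020-08-01"], "proximity_count": [12]}
).set_index(["participant_id", "date_lj"])

df_full = df_daily.join(df_prox)
print(df_full)  # one row: PA = 3.0, NA = 2.0, proximity_count = 12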