import datetime  # NOTE(review): shadowed by the `from datetime import datetime` below — kept to preserve original imports
import itertools
from datetime import datetime, timedelta, time

import numpy as np
import pandas as pd

from features_utils import splitOvernightEpisodes, splitMultiSegmentEpisodes
def getEpisodeDurationFeatures(screen_deltas, episode, metrics, phone_sensed_bins, bin_size, reference_hour_first_use, day_segment=None):
    """Compute per-day duration features for one screen episode type.

    Parameters
    ----------
    screen_deltas : pd.DataFrame
        Episode rows with at least "episode", "local_start_date" and
        "time_diff" columns ("local_start_date_time" is also required when
        the firstuseafter metric is requested).
    episode : str
        Episode type to filter on (e.g. "unlock"); appended to every
        generated column name.
    metrics : list of str
        Metric names to compute; unknown names are ignored.
    phone_sensed_bins : pd.DataFrame
        Binary sensing matrix indexed by date; row sums * bin_size give the
        sensed minutes for a day.
    bin_size : int
        Minutes represented by each sensing bin.
    reference_hour_first_use : int
        Hour the "firstuseafter" metric is measured from.
    day_segment : str, optional
        Segment label used in column names. Defaults to the module-level
        ``day_segment`` global for backward compatibility with existing
        callers that rely on it.

    Returns
    -------
    pd.DataFrame indexed by local_start_date, one column per requested
    metric, named "screen_<day_segment>_<metric><episode>".
    """
    if day_segment is None:
        # Backward-compat fallback: older callers set a module-level global
        # instead of passing the segment explicitly.
        day_segment = globals()["day_segment"]

    screen_deltas_episode = screen_deltas[screen_deltas["episode"] == episode]
    # Hoist the loop-invariant groupby; every metric aggregates by start date.
    grouped = screen_deltas_episode.groupby(["local_start_date"])
    prefix = "screen_" + day_segment + "_"

    duration_helper = pd.DataFrame()

    if "countepisode" in metrics:
        duration_helper = pd.concat(
            [duration_helper,
             grouped["time_diff"].count().to_frame(prefix + "countepisode" + episode)],
            axis=1)

    if "episodepersensedminutes" in metrics:
        for date, episode_count in grouped["time_diff"].count().items():
            sensed_minutes = phone_sensed_bins.loc[date, :].sum() * bin_size
            # Guard against division by zero when nothing was sensed that day.
            episode_per_sensedminutes = episode_count / (1 if sensed_minutes == 0 else sensed_minutes)
            duration_helper.loc[date, prefix + "episodepersensedminutes" + episode] = episode_per_sensedminutes

    # Simple aggregations over episode durations, in the original column order.
    # Aggregating the "time_diff" series directly (instead of the whole frame)
    # avoids errors on non-numeric columns under modern pandas.
    for metric, agg in (("sumduration", "sum"), ("maxduration", "max"),
                        ("minduration", "min"), ("avgduration", "mean"),
                        ("stdduration", "std")):
        if metric in metrics:
            duration_helper = pd.concat(
                [duration_helper,
                 grouped["time_diff"].agg(agg).to_frame(prefix + metric + episode)],
                axis=1)

    first_use_metric = "firstuseafter" + "{0:0=2d}".format(reference_hour_first_use)
    if first_use_metric in metrics:
        # Seconds elapsed since the reference hour for the first episode of each day.
        first_use = grouped["local_start_date_time"].first().apply(
            lambda x: (x.to_pydatetime().hour - reference_hour_first_use) * 3600
                      + x.to_pydatetime().minute * 60
                      + x.to_pydatetime().second)
        duration_helper = pd.concat(
            [duration_helper, first_use.to_frame(prefix + first_use_metric + episode)],
            axis=1)

    return duration_helper
|
|
|
|
|
|
|
|
|
|
|
|
# --- Inputs ---------------------------------------------------------------
screen_deltas = pd.read_csv(
    snakemake.input["screen_deltas"],
    parse_dates=["local_start_date_time", "local_end_date_time",
                 "local_start_date", "local_end_date"])
phone_sensed_bins = pd.read_csv(
    snakemake.input["phone_sensed_bins"],
    parse_dates=["local_date"], index_col="local_date")
# Binarise: any positive bin count counts as "sensed" for that bin.
phone_sensed_bins[phone_sensed_bins > 0] = 1

# --- Parameters -----------------------------------------------------------
day_segment = snakemake.params["day_segment"]
reference_hour_first_use = snakemake.params["reference_hour_first_use"]
metrics_deltas = snakemake.params["metrics_deltas"]
episode_types = snakemake.params["episode_types"]
bin_size = snakemake.params["bin_size"]

# "firstuseafter" is parameterised by the zero-padded reference hour.
metrics_deltas = [
    "firstuseafter" + "{0:0=2d}".format(reference_hour_first_use) if feature_name == "firstuseafter" else feature_name
    for feature_name in metrics_deltas]

# Cross every metric with every episode type to get the full column set.
metrics_deltas_name = ["".join(metric) for metric in itertools.product(metrics_deltas, episode_types)]

# Empty scaffold so that empty input still produces a file with all columns.
screen_features = pd.DataFrame(
    columns=["local_date"] + ["screen_" + day_segment + "_" + x for x in metrics_deltas_name])

if not screen_deltas.empty:
    # preprocess day_segment and episodes
    screen_deltas = splitOvernightEpisodes(screen_deltas, [], ["episode"])
    if (not screen_deltas.empty) and (day_segment != "daily"):
        screen_deltas = splitMultiSegmentEpisodes(screen_deltas, day_segment, [])
    screen_deltas.set_index(["local_start_date"], inplace=True)

    if not screen_deltas.empty:
        screen_features = pd.DataFrame()
        for episode in episode_types:
            episode_features = getEpisodeDurationFeatures(
                screen_deltas, episode, metrics_deltas,
                phone_sensed_bins, bin_size, reference_hour_first_use)
            screen_features = pd.concat([screen_features, episode_features], axis=1)
        screen_features = screen_features.rename_axis("local_date").reset_index()

screen_features.to_csv(snakemake.output[0], index=False)
|