import pandas as pd
import numpy as np
import scipy.stats as stats
from features_utils import splitOvernightEpisodes, splitMultiSegmentEpisodes

# Parameters passed in by the Snakemake rule that runs this script
day_segment = snakemake.params["segment"]
features = snakemake.params["features"]
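# Illustrative wiring (an assumption, not the actual Snakefile): a rule with
#   params: segment = "daily", features = ["count", "mostcommonactivity"]
# would make this script compute those two daily features.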

# Read the input CSVs into pandas dataframes
data = pd.read_csv(snakemake.input['gar_events'], parse_dates=['local_date_time'])
ar_deltas = pd.read_csv(snakemake.input['gar_deltas'], parse_dates=["local_start_date_time", "local_end_date_time", "local_start_date", "local_end_date"])

# One output column name per requested feature
columns = ["ar_" + str(day_segment) + "_" + feature for feature in features]
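# e.g. day_segment == "daily" and features == ["count"] yields ["ar_daily_count"]
# (illustrative values, not taken from any actual config)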

if data.empty:
    # No events at all: emit an empty frame with the expected columns
    finalDataset = pd.DataFrame(columns=columns)
else:
    finalDataset = pd.DataFrame()
    ar_deltas = splitOvernightEpisodes(ar_deltas, [], ['activity'])

    if day_segment != "daily":
        ar_deltas = splitMultiSegmentEpisodes(ar_deltas, day_segment, [])
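    # Sketch of the assumed helper behaviour (inferred from the names, not verified here):
    # an episode spanning midnight, e.g. 23:30 -> 01:10, is split at 00:00 so each piece's
    # duration is credited to the correct local_start_date; splitMultiSegmentEpisodes does
    # the same at day-segment boundaries when day_segment is not "daily".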

    # Index the events by timestamp so they can be resampled per day
    data.local_date_time = pd.to_datetime(data.local_date_time)
    resampledData = data.set_index(data.local_date_time)
    resampledData.drop(columns=['local_date_time'], inplace=True)
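    # Toy example of the daily bucketing used below:
    #   s = pd.Series(['still', 'walking'],
    #                 index=pd.to_datetime(['2020-01-01 08:00', '2020-01-02 09:00']))
    #   s.resample('D').count()  ->  1 sample on 2020-01-01 and 1 on 2020-01-02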

    if day_segment != 'daily':
        resampledData = resampledData.loc[resampledData['local_day_segment'] == str(day_segment)]
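    # Keep only rows from the requested segment; local_day_segment is assumed to hold
    # labels matching the configured segments (e.g. "morning", "evening").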

    if resampledData.empty:
        finalDataset = pd.DataFrame(columns=columns)
    else:
        # Count of samples per day
        if "count" in features:
            finalDataset["ar_" + str(day_segment) + "_count"] = resampledData['activity_type'].resample('D').count()

        # Most common activity of the day
        # Note: relies on scipy's legacy behaviour where stats.mode returns an array of
        # modes (scipy < 1.11); with newer scipy the len() check would need adjusting
        if "mostcommonactivity" in features:
            finalDataset["ar_" + str(day_segment) + "_mostcommonactivity"] = resampledData['activity_type'].resample('D').apply(lambda x: stats.mode(x)[0] if len(stats.mode(x)[0]) != 0 else None)

        # Number of distinct activities per day
        if "countuniqueactivities" in features:
            finalDataset["ar_" + str(day_segment) + "_countuniqueactivities"] = resampledData['activity_type'].resample('D').nunique()

        # Number of times the activity changed during the day
        if "activitychangecount" in features:
            # Compare each sample with the previous one; the first sample is backfilled
            # with itself so it never counts as a change
            resampledData['activity_type_shift'] = resampledData['activity_type'].shift().fillna(resampledData['activity_type'].head(1))
            resampledData['different_activity'] = np.where(resampledData['activity_type'] != resampledData['activity_type_shift'], 1, 0)
            finalDataset["ar_" + str(day_segment) + "_activitychangecount"] = resampledData['different_activity'].resample('D').sum()

    # Duration features: map each output feature to the Google AR activity labels it sums
    deltas_features = {'sumstationary': ['still', 'tilting'],
                       'summobile': ['on_foot', 'running', 'on_bicycle'],
                       'sumvehicle': ['in_vehicle']}
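    # e.g. 'sumstationary' sums time_diff over all 'still' and 'tilting' episodes
    # that started on a given local_start_date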

    for column, activity_labels in deltas_features.items():
        if column in features:
            # The dict-renaming form .agg({name: 'sum'}) was removed in pandas 1.0;
            # a plain .sum() is equivalent since the result is assigned to a named column
            finalDataset["ar_" + str(day_segment) + "_" + str(column)] = (
                ar_deltas[ar_deltas['activity'].isin(activity_labels)]
                .groupby(['local_start_date'])['time_diff']
                .sum())
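    # Note: time_diff is assumed to be each episode's duration (set upstream and adjusted
    # by the splitting helpers), so the grouped sum gives total duration per day.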

finalDataset.index.names = ['local_date']
finalDataset.to_csv(snakemake.output[0])