Cleaning existing CF mains and preparing src script for ACC.
parent ca59a54d8f
commit 8b76c96e47

config.yaml
@@ -477,9 +477,20 @@ EMPATICA_ACCELEROMETER:
   CONTAINER: ACC
   PROVIDERS:
     DBDP:
-      COMPUTE: False
+      COMPUTE: True
       FEATURES: ["maxmagnitude", "minmagnitude", "avgmagnitude", "medianmagnitude", "stdmagnitude"]
       SRC_SCRIPT: src/features/empatica_accelerometer/dbdp/main.py
+    CF:
+      COMPUTE: True
+      FEATURES: ["meanLow", "areaLow", "totalAbsoluteAreaBand", "totalMagnitudeBand", "entropyBand", "skewnessBand", "kurtosisBand",
+                  "postureDistanceLow", "absoluteMeanBand", "absoluteAreaBand", "quartilesBand", "interQuartileRangeBand", "varianceBand",
+                  "coefficientOfVariationBand", "amplitudeBand", "totalEnergyBand", "dominantFrequencyEnergyBand", "meanCrossingRateBand",
+                  "correlationBand", "quartilesMagnitudesBand", "interQuartileRangeMagnitudesBand", "areaUnderAccelerationMagnitude",
+                  "peaksDataLow", "sumPerComponentBand", "velocityBand", "meanKineticEnergyBand", "totalKineticEnergyBand", "squareSumOfComponent",
+                  "sumOfSquareComponents", "averageVectorLength", "averageVectorLengthPower", "rollAvgLow", "pitchAvgLow", "rollStdDevLow",
+                  "pitchStdDevLow", "rollMotionAmountLow", "rollMotionRegularityLow", "manipulationLow", "rollPeaks", "pitchPeaks", "rollPitchCorrelation"]
+      SRC_SCRIPT: src/features/empatica_accelerometer/cf/main.py
+

 # See https://www.rapids.science/latest/features/empatica-heartrate/
 EMPATICA_HEARTRATE:
@@ -495,16 +506,16 @@ EMPATICA_TEMPERATURE:
   CONTAINER: TEMP
   PROVIDERS:
     DBDP:
-      COMPUTE: True
+      COMPUTE: False
       FEATURES: ["maxtemp", "mintemp", "avgtemp", "mediantemp", "modetemp", "stdtemp", "diffmaxmodetemp", "diffminmodetemp", "entropytemp"]
       SRC_SCRIPT: src/features/empatica_temperature/dbdp/main.py
     CF:
-      COMPUTE: True
+      COMPUTE: False
       FEATURES: ["autocorrelations", "countAboveMean", "countBelowMean", "maximum", "minimum", "meanAbsChange", "longestStrikeAboveMean",
                   "longestStrikeBelowMean", "stdDev", "median", "meanChange", "numberOfZeroCrossings", "absEnergy", "linearTrendSlope",
                   "ratioBeyondRSigma", "binnedEntropy", "numOfPeaksAutocorr", "numberOfZeroCrossingsAutocorr", "areaAutocorr",
                   "calcMeanCrossingRateAutocorr", "countAboveMeanAutocorr", "sumPer", "sumSquared", "squareSumOfComponent",
                   "sumOfSquareComponents"]
       SRC_SCRIPT: src/features/empatica_temperature/cf/main.py

 # See https://www.rapids.science/latest/features/empatica-electrodermal-activity/
@@ -512,16 +523,16 @@ EMPATICA_ELECTRODERMAL_ACTIVITY:
   CONTAINER: EDA
   PROVIDERS:
     DBDP:
-      COMPUTE: True
+      COMPUTE: False
       FEATURES: ["maxeda", "mineda", "avgeda", "medianeda", "modeeda", "stdeda", "diffmaxmodeeda", "diffminmodeeda", "entropyeda"]
       SRC_SCRIPT: src/features/empatica_electrodermal_activity/dbdp/main.py
     CF:
-      COMPUTE: True
+      COMPUTE: False
       FEATURES: ['mean', 'std', 'q25', 'q75', 'qd', 'deriv', 'power', 'numPeaks', 'ratePeaks', 'powerPeaks', 'sumPosDeriv', 'propPosDeriv', 'derivTonic',
                   'sigTonicDifference', 'freqFeats','maxPeakAmplitudeChangeBefore', 'maxPeakAmplitudeChangeAfter', 'avgPeakAmplitudeChangeBefore',
                   'avgPeakAmplitudeChangeAfter', 'avgPeakChangeRatio', 'maxPeakIncreaseTime', 'maxPeakDecreaseTime', 'maxPeakDuration', 'maxPeakChangeRatio',
-                  'avgPeakIncreaseTime', 'avgPeakDecreaseTime', 'avgPeakDuration', 'maxPeakResponseSlopeBefore', 'maxPeakResponseSlopeAfter', 'signalOverallChange',
-                  'changeDuration', 'changeRate', 'significantIncrease', 'significantDecrease']
+                  'avgPeakIncreaseTime', 'avgPeakDecreaseTime', 'avgPeakDuration', 'maxPeakResponseSlopeBefore', 'maxPeakResponseSlopeAfter',
+                  'signalOverallChange', 'changeDuration', 'changeRate', 'significantIncrease', 'significantDecrease']
       SRC_SCRIPT: src/features/empatica_electrodermal_activity/cf/main.py

 # See https://www.rapids.science/latest/features/empatica-blood-volume-pulse/
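
Each provider block above follows the same contract: the file in SRC_SCRIPT is expected to expose a function named after the provider key in lowercase plus _features (hence cf_features and dbdp_features in the scripts in this commit), and the FEATURES list is not taken verbatim but intersected with the names the script actually supports. A small, self-contained Python illustration of that intersection, using a made-up requested list rather than anything from this commit:

# The scripts below all do: list(set(provider["FEATURES"]) & set(base_features_names))
requested = ["maxmagnitude", "avgmagnitude", "notAFeature"]      # stand-in for provider["FEATURES"]
supported = ["maxmagnitude", "minmagnitude", "avgmagnitude",
             "medianmagnitude", "stdmagnitude"]                  # base_features_names in the DBDP script
to_compute = list(set(requested) & set(supported))
print(sorted(to_compute))                                        # ['avgmagnitude', 'maxmagnitude']

Unknown names are silently dropped, and because the intersection goes through a set, the resulting column order is not guaranteed to match the order written in config.yaml.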
@@ -0,0 +1,86 @@
+import pandas as pd
+import numpy as np  # needed for np.sqrt in dbdp_features below
+from scipy.stats import entropy
+
+from CalculatingFeatures.helper_functions import convert3DEmpaticaToArray, convertInputInto2d, gsrFeatureNames
+from CalculatingFeatures.calculate_features import calculateFeatures
+
+
+def getSampleRate(data):
+    try:
+        timestamps_diff = data['timestamp'].iloc[1] - data['timestamp'].iloc[0]
+    except:
+        raise Exception("Error occurred while trying to get the sample rate from the first two sequential timestamps.")
+
+    return 1000/timestamps_diff
+
+def extractAccFeaturesFromIntradayData(acc_intraday_data, features, time_segment, filter_data_by_segment):
+    acc_intraday_features = pd.DataFrame(columns=["local_segment"] + features)
+
+    if not acc_intraday_data.empty:
+        sample_rate = getSampleRate(acc_intraday_data)
+
+        acc_intraday_data = filter_data_by_segment(acc_intraday_data, time_segment)
+
+        if not acc_intraday_data.empty:
+
+            acc_intraday_features = pd.DataFrame()
+
+            # apply methods from calculate features module
+            acc_intraday_features = \
+                acc_intraday_data.groupby('local_segment').apply(\
+                    lambda x: calculateFeatures(convertInputInto2d(x['accelerometer'], x.shape[0]), fs=sample_rate, featureNames=features))
+
+            acc_intraday_features.reset_index(inplace=True)
+
+    return acc_intraday_features
+
+
+def cf_features(sensor_data_files, time_segment, provider, filter_data_by_segment, *args, **kwargs):
+    # NOTE: the eda_* names and gsrFeatureNames are carried over from the EDA template;
+    # this CF script is still being prepared for the accelerometer.
+    eda_intraday_data = pd.read_csv(sensor_data_files["sensor_data"])
+
+    requested_intraday_features = provider["FEATURES"]
+    # name of the features this function can compute
+    base_intraday_features_names = gsrFeatureNames
+    # the subset of requested features this function can compute
+    intraday_features_to_compute = list(set(requested_intraday_features) & set(base_intraday_features_names))
+
+    # extract features from intraday data
+    eda_intraday_features = extractAccFeaturesFromIntradayData(eda_intraday_data,
+                                                               intraday_features_to_compute, time_segment,
+                                                               filter_data_by_segment)
+
+    return eda_intraday_features
+
+def dbdp_features(sensor_data_files, time_segment, provider, filter_data_by_segment, *args, **kwargs):
+
+    acc_data = pd.read_csv(sensor_data_files["sensor_data"])
+    requested_features = provider["FEATURES"]
+    # name of the features this function can compute
+    base_features_names = ["maxmagnitude", "minmagnitude", "avgmagnitude", "medianmagnitude", "stdmagnitude"]
+    # the subset of requested features this function can compute
+    features_to_compute = list(set(requested_features) & set(base_features_names))
+
+    acc_features = pd.DataFrame(columns=["local_segment"] + features_to_compute)
+    if not acc_data.empty:
+        acc_data = filter_data_by_segment(acc_data, time_segment)
+
+        if not acc_data.empty:
+            acc_features = pd.DataFrame()
+            # get magnitude related features: magnitude = sqrt(x^2+y^2+z^2)
+            magnitude = acc_data.apply(lambda row: np.sqrt(row["double_values_0"] ** 2 + row["double_values_1"] ** 2 + row["double_values_2"] ** 2), axis=1)
+            acc_data = acc_data.assign(magnitude = magnitude.values)
+
+            if "maxmagnitude" in features_to_compute:
+                acc_features["maxmagnitude"] = acc_data.groupby(["local_segment"])["magnitude"].max()
+            if "minmagnitude" in features_to_compute:
+                acc_features["minmagnitude"] = acc_data.groupby(["local_segment"])["magnitude"].min()
+            if "avgmagnitude" in features_to_compute:
+                acc_features["avgmagnitude"] = acc_data.groupby(["local_segment"])["magnitude"].mean()
+            if "medianmagnitude" in features_to_compute:
+                acc_features["medianmagnitude"] = acc_data.groupby(["local_segment"])["magnitude"].median()
+            if "stdmagnitude" in features_to_compute:
+                acc_features["stdmagnitude"] = acc_data.groupby(["local_segment"])["magnitude"].std()
+
+            acc_features = acc_features.reset_index()
+
+    return acc_features
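
The dbdp_features path above collapses the three accelerometer axes into a single magnitude and then aggregates it per time segment. The sketch below reproduces that computation outside RAPIDS on invented data with one hypothetical segment label, using a vectorised np.sqrt instead of the row-wise apply; the numbers come out the same, the vectorised form is simply faster on long recordings.

import numpy as np
import pandas as pd

# Toy rows mimicking the columns dbdp_features expects; values and segment label are made up.
acc_data = pd.DataFrame({
    "local_segment": ["daily#2021-01-01"] * 4,
    "double_values_0": [0.1, 0.2, 0.0, -0.1],
    "double_values_1": [0.9, 1.0, 1.1, 0.8],
    "double_values_2": [0.0, 0.1, -0.2, 0.3],
})

# magnitude = sqrt(x^2 + y^2 + z^2), as in the committed script
acc_data["magnitude"] = np.sqrt(acc_data["double_values_0"] ** 2
                                + acc_data["double_values_1"] ** 2
                                + acc_data["double_values_2"] ** 2)

grouped = acc_data.groupby("local_segment")["magnitude"]
features = pd.DataFrame({
    "maxmagnitude": grouped.max(),
    "minmagnitude": grouped.min(),
    "avgmagnitude": grouped.mean(),
    "medianmagnitude": grouped.median(),
    "stdmagnitude": grouped.std(),
}).reset_index()
print(features)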
@@ -4,8 +4,6 @@ from scipy.stats import entropy
 from CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, gsrFeatureNames
 from CalculatingFeatures.calculate_features import calculateFeatures
 
-pd.set_option('display.max_columns', None)
-
 
 def getSampleRate(data):
     try:

@@ -30,7 +28,7 @@ def extractEDAFeaturesFromIntradayData(eda_intraday_data, features, time_segment
             # apply methods from calculate features module
             eda_intraday_features = \
                 eda_intraday_data.groupby('local_segment').apply(\
-                    lambda x: calculateFeatures(convertInputInto2d(x['electrodermal_activity'], x.shape[0]), fs=sample_rate, featureNames=features)) # To-Do: Hardcoded f/s ...
+                    lambda x: calculateFeatures(convertInputInto2d(x['electrodermal_activity'], x.shape[0]), fs=sample_rate, featureNames=features))
 
             eda_intraday_features.reset_index(inplace=True)
 
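
After this cleanup, the CF mains all share the same per-segment shape: group the filtered rows by local_segment and hand each segment's one-dimensional signal to calculateFeatures together with the inferred sample rate. The sketch below shows only that groupby/apply shape; fake_calculate_features is a stand-in and not the CalculatingFeatures API, and the segment labels, values and sample rate are invented.

import pandas as pd

def fake_calculate_features(values, fs, feature_names):
    # Stand-in for CalculatingFeatures.calculate_features.calculateFeatures:
    # returns a one-row frame so the groupby/apply result shape is visible.
    row = {"fs": fs}
    if "mean" in feature_names:
        row["mean"] = values.mean()
    if "std" in feature_names:
        row["std"] = values.std()
    return pd.DataFrame([row])

eda = pd.DataFrame({
    "local_segment": ["morning"] * 3 + ["evening"] * 3,
    "electrodermal_activity": [0.11, 0.12, 0.15, 0.40, 0.38, 0.42],
})

sample_rate = 4  # Hz, stand-in for getSampleRate(eda)
features = eda.groupby("local_segment").apply(
    lambda x: fake_calculate_features(x["electrodermal_activity"], fs=sample_rate, feature_names=["mean", "std"]))
features.reset_index(inplace=True)
print(features)

The final reset_index(inplace=True) mirrors the scripts: groupby().apply() returns a frame indexed by local_segment, and resetting the index turns the segment label back into an ordinary column.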
@@ -1,7 +1,7 @@
 import pandas as pd
 from scipy.stats import entropy
 
-from CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, genericFeatureNames
+from CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, accelerometerFeatureNames
 from CalculatingFeatures.calculate_features import calculateFeatures
 
 

@@ -13,7 +13,7 @@ def getSampleRate(data):
 
     return 1000/timestamps_diff
 
-def extractTempFeaturesFromIntradayData(temperature_intraday_data, features, time_segment, filter_data_by_segment):
+def extractAccFeaturesFromIntradayData(temperature_intraday_data, features, time_segment, filter_data_by_segment):
     temperature_intraday_features = pd.DataFrame(columns=["local_segment"] + features)
 
     if not temperature_intraday_data.empty:

@@ -28,7 +28,7 @@ def extractTempFeaturesFromIntradayData(temperature_intraday_data, features, tim
             # apply methods from calculate features module
             temperature_intraday_features = \
                 temperature_intraday_data.groupby('local_segment').apply(\
-                    lambda x: calculateFeatures(convertInputInto2d(x['temperature'], x.shape[0]), fs=sample_rate, featureNames=features)) # To-Do: Hardcoded f/s ...
+                    lambda x: calculateFeatures(convertInputInto2d(x['temperature'], x.shape[0]), fs=sample_rate, featureNames=features))
 
             temperature_intraday_features.reset_index(inplace=True)
 
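
All three CF mains rely on the same getSampleRate helper, which derives the sampling frequency from the gap between the first two rows as 1000/timestamps_diff; that only works if the timestamp column holds Unix time in milliseconds, which is an assumption worth keeping in mind. A quick sanity check with invented timestamps approximating Empatica's 32 Hz accelerometer and 4 Hz EDA/temperature streams:

import pandas as pd

def get_sample_rate(data):
    # same arithmetic as getSampleRate in the scripts above
    timestamps_diff = data["timestamp"].iloc[1] - data["timestamp"].iloc[0]
    return 1000 / timestamps_diff

acc = pd.DataFrame({"timestamp": [1609459200000, 1609459200031.25]})  # ~31.25 ms apart
eda = pd.DataFrame({"timestamp": [1609459200000, 1609459200250]})     # 250 ms apart
print(get_sample_rate(acc))  # 32.0
print(get_sample_rate(eda))  # 4.0

Because only the first two samples are inspected, a gap or duplicated timestamp at the very start of a recording would skew the inferred rate for the whole extraction.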