Delete CF features folder

parent 191e53e543
commit f389ac9d89

@@ -1,224 +0,0 @@
.ipynb_checkpoints
build
CalculatingFeatures.egg-info
dist
venv

# From the website: https://github.com/github/gitignore/blob/master/Python.gitignore
# Also from the website: https://github.com/github/gitignore/blob/master/Global/JetBrains.gitignore


# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

# Output images
cf_tests/output_images/
@@ -1,610 +0,0 @@
from CalculatingFeatures import feature_functions as f
import numpy as np


def calculateFeaturesAcc(ax, ay, az, time, axBand, ayBand, azBand, axLow, ayLow, azLow, featureNames=None):
    """ Calculate features for the accelerometer

    :param ax: array with the X axis
    :param ay: array with the Y axis (None when only one axis is given)
    :param az: array with the Z axis (None when only one axis is given)
    :param time: array of times, denoting when each measurement occurred
    :param axBand, ayBand, azBand: band-pass filtered versions of the three axes
    :param axLow, ayLow, azLow: low-pass filtered versions of the three axes
    :param featureNames: list of features to calculate; if None, all features are calculated
    :return: dict with the calculated features of the accelerometer
    """
    if ay is not None:
        magnitudes = f.magnitudeVector(ax, ay, az)
        magnitudesLow = f.magnitudeVector(axLow, ayLow, azLow)
        magnitudesBand = f.magnitudeVector(axBand, ayBand, azBand)
    else:
        magnitudes = ax
        magnitudesLow = axLow
        magnitudesBand = axBand

    ACsumPerXBand = f.calcSumPerComponent(axBand, time)
    if ay is not None:
        ACsumPerYBand = f.calcSumPerComponent(ayBand, time)
        ACsumPerZBand = f.calcSumPerComponent(azBand, time)

    meanKineticEnergyX = f.calcMeanKineticEnergy(axBand, time)
    if ay is not None:
        meanKineticEnergyY = f.calcMeanKineticEnergy(ayBand, time)
        meanKineticEnergyZ = f.calcMeanKineticEnergy(azBand, time)
        totalKineticEnergy = f.calcTotalKineticEnergy(axBand, ayBand, azBand, time)

        roll = f.calcRoll(ayLow, azLow)
        pitch = f.calcPitch(axLow, ayLow, azLow)

        acAbsoluteArea = f.calcAcAbsoluteArea(axBand, ayBand, azBand)

    row = {}
    if f.checkForFeature("meanLow", featureNames):
        row["mean_XLow"] = f.calcMean(axLow)
        if ay is not None:
            row["mean_YLow"] = f.calcMean(ayLow)
            row["mean_ZLow"] = f.calcMean(azLow)
            row["totalMeanLow"] = (row["mean_XLow"] + row["mean_YLow"] + row["mean_ZLow"]) * len(ax) / 3

    if f.checkForFeature("areaLow", featureNames):
        row["area_XLow"] = f.calcArea(axLow)
        if ay is not None:
            row["area_YLow"] = f.calcArea(ayLow)
            row["area_ZLow"] = f.calcArea(azLow)

    if ay is not None:
        if f.checkForFeature("totalAbsoluteAreaBand", featureNames):
            row["totalAbsoluteAreaBand"] = f.calcTotalAbsoluteArea(f.calcAcAbsoluteArea(axBand, ayBand, azBand))

        if f.checkForFeature("totalMagnitudeBand", featureNames):
            row["totalMagnitudeBand"] = f.calcTotalMagnitude(axBand, ayBand, azBand)

        # Measures of body posture
        if f.checkForFeature("postureDistanceLow", featureNames):
            postureDistance = f.calcPostureDistance(row["mean_XLow"], row["mean_YLow"],
                                                    row["mean_ZLow"]) if "mean_XLow" in row \
                else f.calcPostureDistance(f.calcMean(axLow), f.calcMean(ayLow), f.calcMean(azLow))
            row["postureDistance_XLow"] = postureDistance[0]
            row["postureDistance_YLow"] = postureDistance[1]
            row["postureDistance_ZLow"] = postureDistance[2]

    if f.checkForFeature("entropyBand", featureNames):
        row["entropy_XBand"] = f.calcEntropy(axBand)
        if ay is not None:
            row["entropy_YBand"] = f.calcEntropy(ayBand)
            row["entropy_ZBand"] = f.calcEntropy(azBand)

    if f.checkForFeature("skewnessBand", featureNames):
        row["skewness_XBand"] = f.calcSkewness(axBand)
        if ay is not None:
            row["skewness_YBand"] = f.calcSkewness(ayBand)
            row["skewness_ZBand"] = f.calcSkewness(azBand)

    if f.checkForFeature("kurtosisBand", featureNames):
        row["kurtosis_XBand"] = f.calcKurtosis(axBand)
        if ay is not None:
            row["kurtosis_YBand"] = f.calcKurtosis(ayBand)
            row["kurtosis_ZBand"] = f.calcKurtosis(azBand)

    # Measures of motion shape
    if f.checkForFeature("absoluteMeanBand", featureNames):
        row["absoluteMean_XBand"] = f.calcMean(axBand)
        if ay is not None:
            row["absoluteMean_YBand"] = f.calcMean(ayBand)
            row["absoluteMean_ZBand"] = f.calcMean(azBand)

    if f.checkForFeature("absoluteAreaBand", featureNames):
        row["absoluteArea_XBand"] = f.calcArea(axBand)
        if ay is not None:
            row["absoluteArea_YBand"] = f.calcArea(ayBand)
            row["absoluteArea_ZBand"] = f.calcArea(azBand)
            row["absoluteAreaAllBand"] = f.calcTotalAbsoluteArea(acAbsoluteArea)

    acQuartilesX = f.calcQuartiles(axBand)
    if ay is not None:
        acQuartilesY = f.calcQuartiles(ayBand)
        acQuartilesZ = f.calcQuartiles(azBand)

    if f.checkForFeature("quartilesBand", featureNames):
        row["quartilesQ1_XBand"] = acQuartilesX[0]
        row["quartilesQ2_XBand"] = acQuartilesX[1]
        row["quartilesQ3_XBand"] = acQuartilesX[2]
        if ay is not None:
            row["quartilesQ1_YBand"] = acQuartilesY[0]
            row["quartilesQ2_YBand"] = acQuartilesY[1]
            row["quartilesQ3_YBand"] = acQuartilesY[2]

            row["quartilesQ1_ZBand"] = acQuartilesZ[0]
            row["quartilesQ2_ZBand"] = acQuartilesZ[1]
            row["quartilesQ3_ZBand"] = acQuartilesZ[2]

    if f.checkForFeature("interQuartileRangeBand", featureNames):
        row["interQuartileRange_XBand"] = f.calcInterQuartileRange(acQuartilesX)
        if ay is not None:
            row["interQuartileRange_YBand"] = f.calcInterQuartileRange(acQuartilesY)
            row["interQuartileRange_ZBand"] = f.calcInterQuartileRange(acQuartilesZ)

    # Measures of motion variation
    if f.checkForFeature("varianceBand", featureNames):
        row["variance_XBand"] = f.calcVariance(axBand)
        if ay is not None:
            row["variance_YBand"] = f.calcVariance(ayBand)
            row["variance_ZBand"] = f.calcVariance(azBand)

    if f.checkForFeature("coefficientOfVariationBand", featureNames):
        row["coefficientOfVariation_XBand"] = f.calcCoefficientOfVariation(axBand)
        if ay is not None:
            row["coefficientOfVariation_YBand"] = f.calcCoefficientOfVariation(ayBand)
            row["coefficientOfVariation_ZBand"] = f.calcCoefficientOfVariation(azBand)

    if f.checkForFeature("amplitudeBand", featureNames):
        row["amplitude_XBand"] = f.calcAmplitude(axBand)
        if ay is not None:
            row["amplitude_YBand"] = f.calcAmplitude(ayBand)
            row["amplitude_ZBand"] = f.calcAmplitude(azBand)

    if f.checkForFeature("totalEnergyBand", featureNames):
        row["totalEnergy_XBand"] = f.calcTotalEnergy(axBand)
        if ay is not None:
            row["totalEnergy_YBand"] = f.calcTotalEnergy(ayBand)
            row["totalEnergy_ZBand"] = f.calcTotalEnergy(azBand)

    if f.checkForFeature("dominantFrequencyEnergyBand", featureNames):
        row["dominantFrequencyEnergy_XBand"] = f.calcDominantFrequencyEnergy(axBand)
        if ay is not None:
            row["dominantFrequencyEnergy_YBand"] = f.calcDominantFrequencyEnergy(ayBand)
            row["dominantFrequencyEnergy_ZBand"] = f.calcDominantFrequencyEnergy(azBand)

    if f.checkForFeature("meanCrossingRateBand", featureNames):
        row["meanCrossingRate_XBand"] = f.calcMeanCrossingRate(axBand)
        if ay is not None:
            row["meanCrossingRate_YBand"] = f.calcMeanCrossingRate(ayBand)
            row["meanCrossingRate_ZBand"] = f.calcMeanCrossingRate(azBand)

    if ay is not None:
        if f.checkForFeature("correlationBand", featureNames):
            row["correlation_X_YBand"] = f.calcCorrelation(axBand, ayBand)
            row["correlation_X_ZBand"] = f.calcCorrelation(axBand, azBand)
            row["correlation_Y_ZBand"] = f.calcCorrelation(ayBand, azBand)

    acQuartilesMagnitude = f.calcQuartiles(magnitudesBand)
    if f.checkForFeature("quartilesMagnitudesBand", featureNames):
        row["quartilesMagnitudes_XBand"] = acQuartilesMagnitude[0]
        row["quartilesMagnitudes_YBand"] = acQuartilesMagnitude[1]
        row["quartilesMagnitudes_ZBand"] = acQuartilesMagnitude[2]

    if f.checkForFeature("interQuartileRangeMagnitudesBand", featureNames):
        row["interQuartileRangeMagnitudesBand"] = f.calcInterQuartileRange(acQuartilesMagnitude)

    if f.checkForFeature("areaUnderAccelerationMagnitude", featureNames):
        row["areaUnderAccelerationMagnitude"] = f.calcAreaUnderAccelerationMagnitude(magnitudes, time)

    if f.checkForFeature("peaksDataLow", featureNames):
        peakCount = f.calcPeakCount(magnitudesLow)
        row["peaksCountLow"] = peakCount[0]
        row["peaksSumLow"] = peakCount[1]
        row["peaksAmplitudeAvgLow"] = peakCount[3]
        row["peaksPeakAvgLow"] = peakCount[2]

    if f.checkForFeature("sumPerComponentBand", featureNames):
        row["sumPerComponent_XBand"] = ACsumPerXBand
        if ay is not None:
            row["sumPerComponent_YBand"] = ACsumPerYBand
            row["sumPerComponent_ZBand"] = ACsumPerZBand

    if f.checkForFeature("velocityBand", featureNames):
        row["velocity_XBand"] = f.computeACVelocity(axBand, time)
        if ay is not None:
            row["velocity_YBand"] = f.computeACVelocity(ayBand, time)
            row["velocity_ZBand"] = f.computeACVelocity(azBand, time)

    if f.checkForFeature("meanKineticEnergyBand", featureNames):
        row["meanKineticEnergy_XBand"] = meanKineticEnergyX
        if ay is not None:
            row["meanKineticEnergy_YBand"] = meanKineticEnergyY
            row["meanKineticEnergy_ZBand"] = meanKineticEnergyZ

    if ay is not None:
        if f.checkForFeature("totalKineticEnergyBand", featureNames):
            row["totalKineticEnergyBand"] = totalKineticEnergy

    ACsumPerX = f.calcSumPerComponent(ax, time)
    acSumSquaredX = pow(ACsumPerX, 2)
    if ay is not None:
        ACsumPerY = f.calcSumPerComponent(ay, time)
        ACsumPerZ = f.calcSumPerComponent(az, time)
        acSumSquaredY = pow(ACsumPerY, 2)
        acSumSquaredZ = pow(ACsumPerZ, 2)

    if f.checkForFeature("squareSumOfComponent", featureNames):
        row["squareSumOfComponent_X"] = acSumSquaredX
        if ay is not None:
            row["squareSumOfComponent_Y"] = acSumSquaredY
            row["squareSumOfComponent_Z"] = acSumSquaredZ

    if f.checkForFeature("averageVectorLength", featureNames):
        row["averageVectorLength"] = f.calcAverageVectorLength(magnitudes)

    if f.checkForFeature("averageVectorLengthPower", featureNames):
        row["averageVectorLengthPower"] = f.calcAverageVectorLengthPower(magnitudes)

    if ay is not None:
        if f.checkForFeature("rollAvgLow", featureNames):
            row["rollAvgLow"] = (f.max(roll) - f.min(roll))

        if f.checkForFeature("pitchAvgLow", featureNames):
            row["pitchAvgLow"] = (f.max(pitch) - f.min(pitch))

        if f.checkForFeature("rollStdDevLow", featureNames):
            row["rollStdDevLow"] = f.stdDev(roll)

        if f.checkForFeature("pitchStdDevLow", featureNames):
            row["pitchStdDevLow"] = f.stdDev(pitch)

        if f.checkForFeature("rollMotionAmountLow", featureNames):
            row["rollMotionAmountLow"] = f.rollMotionAmount(roll)

        if f.checkForFeature("rollMotionRegularityLow", featureNames):
            row["rollMotionRegularityLow"] = f.rollMotionRegularity(roll)

        if f.checkForFeature("manipulationLow", featureNames):
            row["manipulationLow"] = f.manipulation(axLow, ayLow, azLow, roll, pitch)

        if f.checkForFeature("rollPeaks", featureNames):
            roll_peaks = f.calcPeakCount(roll)
            row["rollPeak0"] = roll_peaks[0]
            row["rollPeak1"] = roll_peaks[1]
            row["rollPeak2"] = roll_peaks[2]
            row["rollPeak3"] = roll_peaks[3]

        if f.checkForFeature("pitchPeaks", featureNames):
            pitch_peaks = f.calcPeakCount(pitch)
            row["pitchPeak0"] = pitch_peaks[0]
            row["pitchPeak1"] = pitch_peaks[1]
            row["pitchPeak2"] = pitch_peaks[2]
            row["pitchPeak3"] = pitch_peaks[3]

        if f.checkForFeature("rollPitchCorrelation", featureNames):
            row["rollPitchCorrelation"] = f.calcCorrelation(roll, pitch)

    return row


def calcCommonFeatures(x, y, z, time, featureNames=None):
    """ Calculate features common to the accelerometer and gyroscope

    :param x: array including the X axis
    :param y: array including the Y axis (None when only one axis is given)
    :param z: array including the Z axis (None when only one axis is given)
    :param time: array of timestamps
    :param featureNames: list of features to calculate; if None, all features are calculated
    :return: dict with the calculated features
    """
    row = {}
    if f.checkForFeature("autocorrelations", featureNames):
        row.update(calcAutocorrelations(x, "_X"))
        if y is not None:
            row.update(calcAutocorrelations(y, "_Y"))
            row.update(calcAutocorrelations(z, "_Z"))

    if f.checkForFeature("countAboveMean", featureNames):
        row["countAboveMean_X"] = f.countAboveMean(x)
        if y is not None:
            row["countAboveMean_Y"] = f.countAboveMean(y)
            row["countAboveMean_Z"] = f.countAboveMean(z)

    if f.checkForFeature("countBelowMean", featureNames):
        row["countBelowMean_X"] = f.countBelowMean(x)
        if y is not None:
            row["countBelowMean_Y"] = f.countBelowMean(y)
            row["countBelowMean_Z"] = f.countBelowMean(z)

    if f.checkForFeature("maximum", featureNames):
        row["maximum_X"] = f.max(x)
        if y is not None:
            row["maximum_Y"] = f.max(y)
            row["maximum_Z"] = f.max(z)

    if f.checkForFeature("minimum", featureNames):
        row["minimum_X"] = f.min(x)
        if y is not None:
            row["minimum_Y"] = f.min(y)
            row["minimum_Z"] = f.min(z)

    if f.checkForFeature("meanAbsChange", featureNames):
        row["meanAbsChange_X"] = f.meanAbsChange(x)
        if y is not None:
            row["meanAbsChange_Y"] = f.meanAbsChange(y)
            row["meanAbsChange_Z"] = f.meanAbsChange(z)

    if f.checkForFeature("longestStrikeAboveMean", featureNames):
        row["longestStrikeAboveMean_X"] = f._calcMaxLengthOfSequenceTrueOrOne(x > np.mean(x))
        if y is not None:
            row["longestStrikeAboveMean_Y"] = f._calcMaxLengthOfSequenceTrueOrOne(y > np.mean(y))
            row["longestStrikeAboveMean_Z"] = f._calcMaxLengthOfSequenceTrueOrOne(z > np.mean(z))

    if f.checkForFeature("longestStrikeBelowMean", featureNames):
        row["longestStrikeBelowMean_X"] = f._calcMaxLengthOfSequenceTrueOrOne(x < np.mean(x))
        if y is not None:
            row["longestStrikeBelowMean_Y"] = f._calcMaxLengthOfSequenceTrueOrOne(y < np.mean(y))
            row["longestStrikeBelowMean_Z"] = f._calcMaxLengthOfSequenceTrueOrOne(z < np.mean(z))

    if f.checkForFeature("stdDev", featureNames):
        row["stdDev_X"] = f.stdDev(x)
        if y is not None:
            row["stdDev_Y"] = f.stdDev(y)
            row["stdDev_Z"] = f.stdDev(z)

    if f.checkForFeature("median", featureNames):
        row["median_X"] = np.median(x)
        if y is not None:
            row["median_Y"] = np.median(y)
            row["median_Z"] = np.median(z)

    if f.checkForFeature("meanChange", featureNames):
        row["meanChange_X"] = f.meanChange(x)
        if y is not None:
            row["meanChange_Y"] = f.meanChange(y)
            row["meanChange_Z"] = f.meanChange(z)

    if f.checkForFeature("numberOfZeroCrossings", featureNames):
        row["numberOfZeroCrossings_X"] = f.numberOfZeroCrossings(x)
        if y is not None:
            row["numberOfZeroCrossings_Y"] = f.numberOfZeroCrossings(y)
            row["numberOfZeroCrossings_Z"] = f.numberOfZeroCrossings(z)

    if f.checkForFeature("absEnergy", featureNames):
        row["absEnergy_X"] = f.absEnergy(x)
        if y is not None:
            row["absEnergy_Y"] = f.absEnergy(y)
            row["absEnergy_Z"] = f.absEnergy(z)

    if f.checkForFeature("linearTrendSlope", featureNames):
        row["linearTrendSlope_X"] = f.linearTrendSlope(x)
        if y is not None:
            row["linearTrendSlope_Y"] = f.linearTrendSlope(y)
            row["linearTrendSlope_Z"] = f.linearTrendSlope(z)

    if f.checkForFeature("ratioBeyondRSigma", featureNames):
        r = 2.5
        row["ratioBeyondRSigma_X"] = f.ratioBeyondRSigma(x, r)
        if y is not None:
            row["ratioBeyondRSigma_Y"] = f.ratioBeyondRSigma(y, r)
            row["ratioBeyondRSigma_Z"] = f.ratioBeyondRSigma(z, r)

    if f.checkForFeature("binnedEntropy", featureNames):
        max_bins = 10
        row["binnedEntropy_X"] = f.binnedEntropy(x, max_bins)
        if y is not None:
            row["binnedEntropy_Y"] = f.binnedEntropy(y, max_bins)
            row["binnedEntropy_Z"] = f.binnedEntropy(z, max_bins)

    autocorrelationsX = f.autocorrelations(x)
    if y is not None:
        autocorrelationsY = f.autocorrelations(y)
        autocorrelationsZ = f.autocorrelations(z)

    if f.checkForFeature("numOfPeaksAutocorr", featureNames):
        row["numOfPeaksAutocorr_X"] = f.calcPeakCount(autocorrelationsX)[0]
        if y is not None:
            row["numOfPeaksAutocorr_Y"] = f.calcPeakCount(autocorrelationsY)[0]
            row["numOfPeaksAutocorr_Z"] = f.calcPeakCount(autocorrelationsZ)[0]

    if f.checkForFeature("numberOfZeroCrossingsAutocorr", featureNames):
        row["numberOfZeroCrossingsAutocorr_X"] = f.numberOfZeroCrossings(autocorrelationsX)
        if y is not None:
            row["numberOfZeroCrossingsAutocorr_Y"] = f.numberOfZeroCrossings(autocorrelationsY)
            row["numberOfZeroCrossingsAutocorr_Z"] = f.numberOfZeroCrossings(autocorrelationsZ)

    if f.checkForFeature("areaAutocorr", featureNames):
        row["areaAutocorr_X"] = f.calcArea(autocorrelationsX)
        if y is not None:
            row["areaAutocorr_Y"] = f.calcArea(autocorrelationsY)
            row["areaAutocorr_Z"] = f.calcArea(autocorrelationsZ)

    if f.checkForFeature("calcMeanCrossingRateAutocorr", featureNames):
        row["calcMeanCrossingRateAutocorr_X"] = f.calcMeanCrossingRate(autocorrelationsX)
        if y is not None:
            row["calcMeanCrossingRateAutocorr_Y"] = f.calcMeanCrossingRate(autocorrelationsY)
            row["calcMeanCrossingRateAutocorr_Z"] = f.calcMeanCrossingRate(autocorrelationsZ)

    if f.checkForFeature("countAboveMeanAutocorr", featureNames):
        row["countAboveMeanAutocorr_X"] = f.countAboveMean(autocorrelationsX)
        if y is not None:
            row["countAboveMeanAutocorr_Y"] = f.countAboveMean(autocorrelationsY)
            row["countAboveMeanAutocorr_Z"] = f.countAboveMean(autocorrelationsZ)

    GCsumPerX_gyro = f.calcSumPerComponent(x, time)
    if y is not None:
        GCsumPerY_gyro = f.calcSumPerComponent(y, time)
        GCsumPerZ_gyro = f.calcSumPerComponent(z, time)

    if f.checkForFeature("sumPer", featureNames):
        row["sumPer_X"] = GCsumPerX_gyro
        if y is not None:
            row["sumPer_Y"] = GCsumPerY_gyro
            row["sumPer_Z"] = GCsumPerZ_gyro

    GCsumSquaredX = pow(GCsumPerX_gyro, 2)
    if y is not None:
        GCsumSquaredY = pow(GCsumPerY_gyro, 2)
        GCsumSquaredZ = pow(GCsumPerZ_gyro, 2)

    if f.checkForFeature("sumSquared", featureNames):
        row["sumSquared_X"] = GCsumSquaredX
        if y is not None:
            row["sumSquared_Y"] = GCsumSquaredY
            row["sumSquared_Z"] = GCsumSquaredZ

    if y is not None:
        if f.checkForFeature("squareSumOfComponent", featureNames):
            row["squareSumOfComponent"] = pow((GCsumSquaredX + GCsumSquaredY + GCsumSquaredZ), 2)

        if f.checkForFeature("sumOfSquareComponents", featureNames):
            row["sumOfSquareComponents"] = pow(GCsumSquaredX, 2) + pow(GCsumSquaredY, 2) + pow(GCsumSquaredZ, 2)

    return row


def calcAutocorrelations(signal, suffix):
    """ Calculate autocorrelations of the given signal with lags 5, 10, 20, 30, 50, 75 and 100

    :param signal: signal on which to calculate the autocorrelations
    :param suffix: suffix appended to each feature name
    :return: dict with the calculated autocorrelations
    """
    row = {}
    for i in [5, 10, 20, 30, 50, 75, 100]:
        row["autocorrelation_" + str(i) + suffix] = f.autocorrelation(signal, i)
    return row


def calculateFeaturesGyro(gx, gy, gz, time, gxLow, gyLow, gzLow, featureNames=None):
    """ Calculate features for the gyroscope

    :param gx: array including the X axis of the gyroscope
    :param gy: array including the Y axis of the gyroscope (None when only one axis is given)
    :param gz: array including the Z axis of the gyroscope (None when only one axis is given)
    :param time: array of times, denoting when each measurement of the gyroscope occurred
    :param gxLow, gyLow, gzLow: low-pass filtered versions of the three axes
    :param featureNames: list of features to calculate; if None, all features are calculated
    :return: dict including the calculated features
    """
    if gy is not None:
        magnitudesLow_gyro = f.magnitudeVector(gxLow, gyLow, gzLow)
    else:
        magnitudesLow_gyro = gxLow

    row = {}

    if f.checkForFeature("meanLow", featureNames):
        row["mean_XLow"] = f.calcMean(gxLow)
        if gy is not None:
            row["mean_YLow"] = f.calcMean(gyLow)
            row["mean_ZLow"] = f.calcMean(gzLow)
            row["totalMeanLow"] = (row["mean_XLow"] + row["mean_YLow"] + row["mean_ZLow"]) * len(gx) / 3

    if f.checkForFeature("areaLow", featureNames):
        row["area_XLow"] = f.calcArea(gxLow)
        if gy is not None:
            row["area_YLow"] = f.calcArea(gyLow)
            row["area_ZLow"] = f.calcArea(gzLow)

    if gy is not None:
        if f.checkForFeature("totalAbsoluteAreaLow", featureNames):
            row["totalAbsoluteAreaLow"] = f.calcTotalAbsoluteArea(f.calcAcAbsoluteArea(gxLow, gyLow, gzLow))

        if f.checkForFeature("totalMagnitudeLow", featureNames):
            row["totalMagnitudeLow"] = f.calcTotalMagnitude(gxLow, gyLow, gzLow)

    if f.checkForFeature("entropyLow", featureNames):
        row["entropy_XLow"] = f.calcEntropy(gxLow)
        if gy is not None:
            row["entropy_YLow"] = f.calcEntropy(gyLow)
            row["entropy_ZLow"] = f.calcEntropy(gzLow)

    if f.checkForFeature("skewnessLow", featureNames):
        row["skewness_XLow"] = f.calcSkewness(gxLow)
        if gy is not None:
            row["skewness_YLow"] = f.calcSkewness(gyLow)
            row["skewness_ZLow"] = f.calcSkewness(gzLow)

    if f.checkForFeature("kurtosisLow", featureNames):
        row["kurtosis_XLow"] = f.calcKurtosis(gxLow)
        if gy is not None:
            row["kurtosis_YLow"] = f.calcKurtosis(gyLow)
            row["kurtosis_ZLow"] = f.calcKurtosis(gzLow)

    gcQuartilesX = f.calcQuartiles(gxLow)
    if gy is not None:
        gcQuartilesY = f.calcQuartiles(gyLow)
        gcQuartilesZ = f.calcQuartiles(gzLow)

    if f.checkForFeature("quartilesLow", featureNames):
        row["quartiles_Q1_XLow"] = gcQuartilesX[0]
        row["quartiles_Q2_XLow"] = gcQuartilesX[1]
        row["quartiles_Q3_XLow"] = gcQuartilesX[2]

        if gy is not None:
            row["quartiles_Q1_YLow"] = gcQuartilesY[0]
            row["quartiles_Q2_YLow"] = gcQuartilesY[1]
            row["quartiles_Q3_YLow"] = gcQuartilesY[2]

            row["quartiles_Q1_ZLow"] = gcQuartilesZ[0]
            row["quartiles_Q2_ZLow"] = gcQuartilesZ[1]
            row["quartiles_Q3_ZLow"] = gcQuartilesZ[2]

    if f.checkForFeature("interQuartileRangeLow", featureNames):
        row["interQuartileRange_XLow"] = f.calcInterQuartileRange(gcQuartilesX)
        if gy is not None:
            row["interQuartileRange_YLow"] = f.calcInterQuartileRange(gcQuartilesY)
            row["interQuartileRange_ZLow"] = f.calcInterQuartileRange(gcQuartilesZ)

    # Measures of motion variation
    if f.checkForFeature("varianceLow", featureNames):
        row["variance_XLow"] = f.calcVariance(gxLow)
        if gy is not None:
            row["variance_YLow"] = f.calcVariance(gyLow)
            row["variance_ZLow"] = f.calcVariance(gzLow)

    if f.checkForFeature("coefficientOfVariationLow", featureNames):
        row["coefficientOfVariation_XLow"] = f.calcCoefficientOfVariation(gxLow)
        if gy is not None:
            row["coefficientOfVariation_YLow"] = f.calcCoefficientOfVariation(gyLow)
            row["coefficientOfVariation_ZLow"] = f.calcCoefficientOfVariation(gzLow)

    if f.checkForFeature("amplitudeLow", featureNames):
        row["amplitude_XLow"] = f.calcAmplitude(gxLow)
        if gy is not None:
            row["amplitude_YLow"] = f.calcAmplitude(gyLow)
            row["amplitude_ZLow"] = f.calcAmplitude(gzLow)

    if f.checkForFeature("totalEnergyLow", featureNames):
        row["totalEnergy_XLow"] = f.calcTotalEnergy(gxLow)
        if gy is not None:
            row["totalEnergy_YLow"] = f.calcTotalEnergy(gyLow)
            row["totalEnergy_ZLow"] = f.calcTotalEnergy(gzLow)

    if f.checkForFeature("dominantFrequencyEnergyLow", featureNames):
        row["dominantFrequencyEnergy_XLow"] = f.calcDominantFrequencyEnergy(gxLow)
        if gy is not None:
            row["dominantFrequencyEnergy_YLow"] = f.calcDominantFrequencyEnergy(gyLow)
            row["dominantFrequencyEnergy_ZLow"] = f.calcDominantFrequencyEnergy(gzLow)

    if f.checkForFeature("meanCrossingRateLow", featureNames):
        row["meanCrossingRate_XLow"] = f.calcMeanCrossingRate(gxLow)
        if gy is not None:
            row["meanCrossingRate_YLow"] = f.calcMeanCrossingRate(gyLow)
            row["meanCrossingRate_ZLow"] = f.calcMeanCrossingRate(gzLow)

    if gy is not None:
        if f.checkForFeature("correlationLow", featureNames):
            row["correlation_X_YLow"] = f.calcCorrelation(gxLow, gyLow)
            row["correlation_X_ZLow"] = f.calcCorrelation(gxLow, gzLow)
            row["correlation_Y_ZLow"] = f.calcCorrelation(gyLow, gzLow)

    gcQuartilesMagnitude = f.calcQuartiles(magnitudesLow_gyro)

    if f.checkForFeature("quartilesMagnitudeLow", featureNames):
        row["quartilesMagnitudeLow_Q1"] = gcQuartilesMagnitude[0]
        row["quartilesMagnitudeLow_Q2"] = gcQuartilesMagnitude[1]
        row["quartilesMagnitudeLow_Q3"] = gcQuartilesMagnitude[2]

    if f.checkForFeature("interQuartileRangeMagnitudesLow", featureNames):
        row["interQuartileRangeMagnitudesLow"] = f.calcInterQuartileRange(gcQuartilesMagnitude)

    if gy is not None:
        if f.checkForFeature("areaUnderMagnitude", featureNames):
            row["areaUnderMagnitude"] = f.calcAreaUnderAccelerationMagnitude(f.magnitudeVector(gx, gy, gz), time)

    if f.checkForFeature("peaksCountLow", featureNames):
        # calcPeaks returns two values; Q3/Q4 repeat them
        peaksCount_gyro = f.calcPeaks(magnitudesLow_gyro)
        row["peaksCountLow_Q1"] = peaksCount_gyro[0]
        row["peaksCountLow_Q2"] = peaksCount_gyro[1]
        row["peaksCountLow_Q3"] = peaksCount_gyro[0]
        row["peaksCountLow_Q4"] = peaksCount_gyro[1]

    if f.checkForFeature("averageVectorLengthLow", featureNames):
        row["averageVectorLengthLow"] = f.calcAverageVectorLength(magnitudesLow_gyro)

    if f.checkForFeature("averageVectorLengthPowerLow", featureNames):
        row["averageVectorLengthPowerLow"] = f.calcAverageVectorLengthPower(magnitudesLow_gyro)

    return row
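Every block in this file follows the same gate: a feature is computed only when checkForFeature(name, featureNames) passes, and the Y/Z variants only when a second and third axis exist. A minimal, self-contained sketch of that pattern (the checkForFeature stand-in below is hypothetical, only to make the snippet runnable on its own):

import numpy as np

def checkForFeature(name, featureNames):
    # stand-in for CalculatingFeatures.helper_functions.checkForFeature:
    # featureNames=None is taken to mean "compute everything"
    return featureNames is None or name in featureNames

def calcRow(x, y=None, featureNames=None):
    row = {}
    if checkForFeature("maximum", featureNames):
        row["maximum_X"] = np.max(x)
        if y is not None:
            row["maximum_Y"] = np.max(y)
    return row

print(calcRow(np.array([1.0, 3.0, 2.0])))         # {'maximum_X': 3.0}
print(calcRow(np.array([1.0]), featureNames=[]))  # {}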
@@ -1,138 +0,0 @@
from tqdm import tqdm
from CalculatingFeatures.helper_functions import *
import CalculatingFeatures.gsr as gsr
import CalculatingFeatures.hrv as hrv
from CalculatingFeatures.calculate_acc_gyro_common_features import *

ALPHA_LOW = 0.3
ALPHA_BAND_LOW = 0.3
ALPHA_BAND_HIGH = 0.6

DEFAULT_DELTA_TIME = 0.02

MIN_INPUT_ARRAY_WIDTH = 256


def calculateFeatures(x2d, y2d=None, z2d=None, time2d=None, fs=None, prefix=None, featureNames=None):
    """ Calculate features for the given data

    Feature names are stored at the end of the "helper_functions.py" file in the variables frequencyFeatureNames, genericFeatureNames,
    accelerometerFeatureNames, gyroscopeFeatureNames, edaFeatureNames and bvpFeatureNames. For information about the features,
    read the README.md file.

    For calculation of features with a 1D input shape (one axis), pass only the x2d parameter. Examples of inputs with a 1D
    shape are BVP and EDA signals.
    For calculation of features with a 3D input shape (three axes), also pass the y2d and z2d parameters. Examples of inputs
    with a 3D shape are gyroscope and accelerometer signals.

    Each individual input axis has to be in a 2D shape. This means that input signals have to be split into rows.
    If BVP features are being calculated, the window width has to be at least 256. The conversion from 1D to 2D can be
    made with the "convertInputInto2d()" function, located in the helper_functions.py file.

    If the sampling frequency (fs) is not given, it will be calculated from the time2d parameter.
    If the array of times (time2d) is not given, it will be calculated from the fs parameter.
    If neither is given, fs will be calculated as 1/DEFAULT_DELTA_TIME.

    :param x2d: 2D array including the X axis
    :param y2d: 2D array including the Y axis
    :param z2d: 2D array including the Z axis
    :param time2d: 2D array of times, denoting when each measurement occurred
    :param fs: sampling frequency
    :param prefix: prefix to prepend to each column name
    :param featureNames: list of features to calculate. If it is None, all features will be calculated
    :return: pandas DataFrame of the calculated features
    """
    if len(x2d[0]) < MIN_INPUT_ARRAY_WIDTH:
        raise Exception("Input 2D array width has to be at least " + str(MIN_INPUT_ARRAY_WIDTH))

    if type(x2d) is list:
        x2d = np.asarray(x2d)
    if type(y2d) is list:
        y2d = np.asarray(y2d)
    if type(z2d) is list:
        z2d = np.asarray(z2d)
    if type(time2d) is list:
        time2d = np.asarray(time2d)

    if (x2d is not None and y2d is not None and z2d is not None) or (x2d is not None and y2d is None and z2d is None):
        if y2d is not None and not (x2d.shape == y2d.shape and y2d.shape == z2d.shape):
            raise Exception("x2d, y2d, z2d shapes have to be the same!")
        # Verify fs and the time array
        if time2d is not None and fs is not None and fs != 1 / (time2d[0, 1] - time2d[0, 0]):
            raise Exception("sampling frequency of the given time2d matrix and fs do not match!")
        if time2d is None:
            deltaTime = 1 / fs if fs is not None else DEFAULT_DELTA_TIME
            time2d = np.asarray(convertInputInto2d([i * deltaTime for i in range(x2d.size)], x2d.shape[1]))
        if fs is None:
            fs = 1 / (time2d[0][1] - time2d[0][0])
    else:
        raise Exception("Incorrect input! Either x2d, y2d and z2d are given, or only x2d is given!")

    fs = int(fs)

    if y2d is None:
        y2d = z2d = [None] * len(x2d)

    df = pd.DataFrame()
    for x, y, z, time in tqdm(zip(x2d, y2d, z2d, time2d), total=len(x2d)):
        xBand = f.bandPassFilter(x, ALPHA_BAND_LOW, ALPHA_BAND_HIGH)
        if y is not None:
            yBand = f.bandPassFilter(y, ALPHA_BAND_LOW, ALPHA_BAND_HIGH)
            zBand = f.bandPassFilter(z, ALPHA_BAND_LOW, ALPHA_BAND_HIGH)

        xLow = f.lowPassFilter(x, ALPHA_LOW)
        if y is not None:
            yLow = f.lowPassFilter(y, ALPHA_LOW)
            zLow = f.lowPassFilter(z, ALPHA_LOW)

        row = {}

        if y is not None:
            row.update(calculateFeaturesAcc(x, y, z, time, xBand, yBand, zBand, xLow, yLow, zLow, featureNames))
        else:
            row.update(calculateFeaturesAcc(x, y, z, time, xBand, None, None, xLow, None, None, featureNames))

        if y is not None:
            row.update(calculateFeaturesGyro(x, y, z, time, xLow, yLow, zLow, featureNames))
        else:
            row.update(calculateFeaturesGyro(x, y, z, time, xLow, None, None, featureNames))

        row.update(calcCommonFeatures(x, y, z, time, featureNames))

        # Add frequency features
        row.update({str(key) + "_X": val for key, val in f.computeFreqFeatures(x, featureNames, fs).items()})
        if y is not None:
            row.update({str(key) + "_Y": val for key, val in f.computeFreqFeatures(y, featureNames, fs).items()})
            row.update({str(key) + "_Z": val for key, val in f.computeFreqFeatures(z, featureNames, fs).items()})

        # EDA features
        row.update({str(key) + "_X": val for key, val in
                    gsr.extractGsrFeatures(x, sampleRate=fs, featureNames=featureNames).items()})
        if y is not None:
            row.update({str(key) + "_Y": val for key, val in
                        gsr.extractGsrFeatures(y, sampleRate=fs, featureNames=featureNames).items()})
            row.update({str(key) + "_Z": val for key, val in
                        gsr.extractGsrFeatures(z, sampleRate=fs, featureNames=featureNames).items()})

        # BVP features
        row.update({str(key) + "_X": val for key, val in
                    hrv.extractHrvFeatures(x, sampling=fs, featureNames=featureNames).items()})
        if y is not None:
            row.update({str(key) + "_Y": val for key, val in
                        hrv.extractHrvFeatures(y, sampling=fs, featureNames=featureNames).items()})
            row.update({str(key) + "_Z": val for key, val in
                        hrv.extractHrvFeatures(z, sampling=fs, featureNames=featureNames).items()})

        df = df.append(row, ignore_index=True)

    if prefix is not None:
        dfNewCols = []
        for col in df.columns:
            dfNewCols.append(prefix + "_" + col)
        df.columns = dfNewCols

    return df
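The docstring above pins down the calling convention; a minimal usage sketch under those rules follows (random data standing in for a real recording; the module path in the import is an assumption, since the file name is not shown in this diff):

import numpy as np
# module path assumed; adjust to wherever calculateFeatures lives in the package
from CalculatingFeatures.calculate_features import calculateFeatures

fs = 50                          # Hz
x2d = np.random.randn(10, 256)   # 10 windows, 256 samples each (MIN_INPUT_ARRAY_WIDTH)
y2d = np.random.randn(10, 256)
z2d = np.random.randn(10, 256)

# 3D input (e.g. accelerometer); time2d is derived from fs when omitted
df = calculateFeatures(x2d, y2d, z2d, fs=fs, prefix="acc")

# 1D input (e.g. EDA or BVP): pass only x2d
df1 = calculateFeatures(np.random.randn(10, 256), fs=fs, prefix="eda")
print(df.shape, df1.shape)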
@@ -1,90 +0,0 @@
import numpy as np
import math


def peak_detector(ppg, Fs):
    """
    Peak detector written by Gasper Slapnicar. Optimized for detecting peaks in a PPG signal.

    :param ppg: signal where peaks are to be detected, 1-D array-like
    :param Fs: sampling frequency
    :return: peaks and valleys in the PPG signal, as two arrays of indices into the ppg input
    """
    peak_duration = int(math.floor((1.0 / 9) * Fs))
    aver_pulse_rate = int(math.floor((2.0 / 3) * Fs))
    aver_level_window = int(math.floor(2 * Fs))

    mean_coef_for_threshold = 0.02
    signal_length = len(ppg)

    ppg = np.asarray(ppg)  # accept any 1-D array-like
    ppg_squared = np.square(ppg)
    ppg_squared[ppg < 0] = 0

    mean_peak_level = np.convolve(ppg_squared, np.ones((peak_duration,)) / peak_duration, mode='same')
    mean_beat_level = np.convolve(ppg_squared, np.ones((aver_pulse_rate,)) / aver_pulse_rate, mode='same')

    thresh1 = np.add(mean_beat_level,
                     mean_coef_for_threshold * np.convolve(ppg_squared,
                                                           np.ones((aver_level_window,)) / aver_level_window,
                                                           mode='same'))
    block_of_interest = np.zeros(signal_length)
    block_of_interest[mean_peak_level > thresh1] = 1

    block_edges = np.diff(block_of_interest)
    block_start = np.add(np.where(block_edges == 1), 1)[0]
    if block_start.size == 0:
        return np.array([]), np.array([])
    else:
        block_end = np.where(block_edges == -1)[0]

        if block_end.size == 0:
            return np.array([]), np.array([])

        if block_start[0] > block_end[0]:
            block_start = np.insert(block_start, 0, 1, axis=0)

        if block_start[-1] > block_end[-1]:
            block_end = np.append(block_end, signal_length)

        if len(block_start) != len(block_end):
            return np.array([]), np.array([])

        length_block = np.subtract(block_end, block_start)
        correct_blocks = np.where(length_block > peak_duration)

        peak_pos = np.zeros(len(correct_blocks[0]))
        i_peak = 0
        for iBlock in correct_blocks[0]:
            block_of_interest = ppg_squared[block_start[iBlock]:block_end[iBlock]]
            peak_pos[i_peak] = max(range(len(block_of_interest)), key=block_of_interest.__getitem__)
            peak_pos[i_peak] = peak_pos[i_peak] + (block_start[iBlock] - 1)
            i_peak += 1

        interpeak_threshold_coeff = 0.65
        max_over_average = 1.15
        need_check = True
        while need_check:
            interpeak = np.diff(peak_pos)

            if interpeak.size == 0:
                return np.array([]), np.array([])

            mean_interpeak = np.mean(interpeak)
            interpeak_double = np.insert(np.add(interpeak[0:-1], interpeak[1:]), 0, 2 * interpeak[0], axis=0)
            interpeak_thresh = np.insert(interpeak_double[0:-2], 0, [2 * mean_interpeak, 2 * mean_interpeak], axis=0)
            interpeak_thresh[interpeak_thresh > 2 * max_over_average * mean_interpeak] = 2 * max_over_average \
                                                                                         * mean_interpeak
            interpeak_thresh = interpeak_thresh * interpeak_threshold_coeff
            index_short_interval = np.where(interpeak_double < interpeak_thresh)[0]

            if index_short_interval.size != 0:
                peak_pos = np.delete(peak_pos, index_short_interval.tolist())
            else:
                need_check = False

        # Add valleys (simple detection between consecutive peaks)
        valley_pos = []
        for start, end in zip(peak_pos[0:-1].astype('int'), peak_pos[1:].astype('int')):
            valley_pos.append(min(range(len(ppg[start:end])), key=ppg[start:end].__getitem__) + start)

        # Shift by one sample; per the original author's note, this compensates for an
        # off-by-one introduced somewhere in the indexing above
        return np.add(peak_pos, 1).astype('int'), np.array(valley_pos)
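A quick sanity-check sketch for the detector, with a toy sinusoid standing in for a PPG trace (run inside this module, or import peak_detector from wherever the file lives in the package; the file name is not shown in this diff):

import numpy as np

Fs = 64                            # Hz
t = np.arange(0, 10, 1.0 / Fs)
ppg = np.sin(2 * np.pi * 1.2 * t)  # ~72 "beats"/min toy signal

peaks, valleys = peak_detector(ppg, Fs)
print(len(peaks), "peaks,", len(valleys), "valleys")
if len(peaks) > 1:
    # inter-peak distance in samples should be close to Fs / 1.2
    print(np.diff(peaks).mean())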
@@ -1,986 +0,0 @@
import numpy as np
from scipy.signal import welch
from scipy.stats import entropy, skew, kurtosis, iqr, linregress
import pandas as pd
import itertools
from CalculatingFeatures.helper_functions import checkForFeature


def computeFreqFeatures(data, featureNames=None, Fs=50):
    """ Computes frequency features of the given array. The signal is converted to a power spectral density signal and
    the features are calculated on that signal.

    :param data: data on which to calculate the features
    :param featureNames: names of the features to calculate
    :param Fs: sampling rate of the given data
    :return: the calculated features (the names of the features are given in the array "freqFeatureNames")
    """
    features = {}

    # 3 highest peaks and the corresponding frequencies
    f, Pxx_den = welch(data, Fs)  # f: array of sample frequencies, Pxx_den: power spectrum of data

    # arr.argsort() returns an array of indices of the same shape as arr that would sort the array;
    # arr[arr.argsort()] returns the sorted array arr (if one-dimensional)
    indices_of_max = Pxx_den.argsort()[-3:][::-1]  # indices of the three largest values, in descending order

    if checkForFeature("fqHighestPeakFreqs", featureNames):
        highestPeakFreqs = f[indices_of_max]  # the three frequencies corresponding to the largest peaks
        features["fqHighestPeakFreq1"] = highestPeakFreqs[0]
        features["fqHighestPeakFreq2"] = highestPeakFreqs[1]
        features["fqHighestPeakFreq3"] = highestPeakFreqs[2]

    if checkForFeature("fqHighestPeaks", featureNames):
        highestPeaks = Pxx_den[indices_of_max]  # the three largest peaks
        features["fqHighestPeak1"] = highestPeaks[0]
        features["fqHighestPeak2"] = highestPeaks[1]
        features["fqHighestPeak3"] = highestPeaks[2]

    # Energy and entropy
    # np.fft.fft() computes the one-dimensional n-point discrete Fourier Transform (DFT) with the efficient FFT algorithm
    Y = np.fft.fft(data)
    # energy calculated as the sum of the squared FFT component magnitudes, normalized by the signal length
    Y_abs = np.abs(Y)
    energy_feat = np.sum(np.square(Y_abs)) / len(data)

    entropy_feat = entropy(Y_abs) if Y_abs.any() else np.NaN

    if checkForFeature("fqEnergyFeat", featureNames):
        features["fqEnergyFeat"] = energy_feat

    if checkForFeature("fqEntropyFeat", featureNames):
        features["fqEntropyFeat"] = entropy_feat

    # Binned distribution (histogram):
    # the PSD is split into 10 bins and the fraction of spectral power falling into each bin is calculated.
    if checkForFeature("fqHistogramBins", featureNames):
        total_fft_sum = np.sum(np.square(Pxx_den))

        def getBin(start, end):
            return np.nan if total_fft_sum == 0 else np.sum(np.square(Pxx_den[start:end])) / total_fft_sum

        features["fqHistogramBin1"] = getBin(0, 5)
        features["fqHistogramBin2"] = getBin(5, 10)
        features["fqHistogramBin3"] = getBin(10, 15)
        features["fqHistogramBin4"] = getBin(15, 20)
        features["fqHistogramBin5"] = getBin(20, 25)
        features["fqHistogramBin6"] = getBin(25, 30)
        features["fqHistogramBin7"] = getBin(30, 35)
        features["fqHistogramBin8"] = getBin(35, 40)
        features["fqHistogramBin9"] = getBin(40, 45)
        features["fqHistogramBin10"] = getBin(45, len(Pxx_den))

    # Statistical features
    if checkForFeature("fqAbsMean", featureNames):
        features["fqAbsMean"] = np.mean(np.abs(data))  # on the raw signal
    if checkForFeature("fqSkewness", featureNames):
        features["fqSkewness"] = skew(Pxx_den)  # on the "distribution-like" periodogram
    if checkForFeature("fqKurtosis", featureNames):
        features["fqKurtosis"] = kurtosis(Pxx_den)  # on the "distribution-like" periodogram
    if checkForFeature("fqInterquart", featureNames):
        features["fqInterquart"] = iqr(data)  # on the raw signal

    return features
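A short sketch of what this function returns for a clean tone (featureNames=None computes everything; the import path matches how this module is used elsewhere in the package):

import numpy as np
from CalculatingFeatures.feature_functions import computeFreqFeatures

fs = 50
t = np.arange(0, 10, 1.0 / fs)
sig = np.sin(2 * np.pi * 5 * t) + 0.1 * np.random.randn(t.size)

feats = computeFreqFeatures(sig, featureNames=None, Fs=fs)
print(feats["fqHighestPeakFreq1"])  # expected to land near 5 Hz
print(feats["fqEnergyFeat"], feats["fqEntropyFeat"])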
def calcAreaUnderAccelerationMagnitude(magnitudes, time):
    """
    Calculates the areaUnderAccelerationMagnitude feature
    :param magnitudes: vector of magnitudes
    :param time: array of timestamps
    :return: areaUnderAccelerationMagnitude for the selected axis
    """
    eeArea = 0.0
    dT = 35  # initial time step, replaced by the timestamp differences after the first sample

    for i in range(len(magnitudes)):
        eeArea += magnitudes[i] * dT  # - gravity
        if i > 0:
            dT = time[i] - time[i - 1]
        else:
            dT = 35
    return eeArea


def calcAverageVectorLength(magnitudes):
    """
    Calculates the mean of the magnitude vector
    :param magnitudes: vector of magnitudes
    :return: mean of the magnitude vector
    """
    return np.sum(magnitudes) / len(magnitudes)


def calcAverageVectorLengthPower(magnitudes):
    """
    Calculates the squared mean of the magnitude vector
    :param magnitudes: vector of magnitudes
    :return: mean of the magnitude vector, squared
    """
    return np.square(calcAverageVectorLength(magnitudes))


def calcMeanKineticEnergy(data, time):
    """
    Calculates mean kinetic energy for the selected axis
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :param time: array of timestamps
    :return: mean kinetic energy 1/2*m*v^2
    """
    weight = 60.0
    dT = 1.0
    velocity = 0.0
    kinetic = 0.0

    for i in range(len(data)):
        velocity += data[i] * dT
        kinetic += 0.5 * weight * velocity * velocity * dT
        if i < len(time) - 1:
            dT = (time[i + 1] - time[i]) / 1000.0

    return kinetic


def calcPeaks(magnitudes):
    """
    Calculates the number of peaks and the sum of their values
    :param magnitudes: vector of magnitudes
    :return: array of two doubles - [0] number of peaks, [1] sum of peak values
    """
    maxValue = 500.0
    previous = -200.0
    threshold = 3
    peaks = np.empty(0)
    sumOfPeakValues = 0.0
    peak = False

    for curMagnitude in magnitudes:
        if curMagnitude > threshold:
            if curMagnitude >= maxValue:
                maxValue = curMagnitude
                peak = True
            elif curMagnitude < maxValue:
                if peak and previous > curMagnitude:
                    peaks = np.append(peaks, maxValue)
                    peak = False
                    sumOfPeakValues += maxValue

            if curMagnitude > previous and not peak:
                peak = True
                maxValue = -200.0
        previous = curMagnitude

    return np.array([float(len(peaks)), sumOfPeakValues])


def calcTotalKineticEnergy(x, y, z, t):
    """
    Calculates total kinetic energy for all axes
    :param x: data from accelerometer for X axis (band-pass filtered)
    :param y: data from accelerometer for Y axis (band-pass filtered)
    :param z: data from accelerometer for Z axis (band-pass filtered)
    :param t: array of timestamps
    :return: total kinetic energy 1/2*m*v^2, averaged over the window duration
    """
    weight = 60.0
    totaltime = (t[-1] - t[0]) / 1000.0

    dT = 1.0
    velocityX = 0.0
    velocityY = 0.0
    velocityZ = 0.0
    kineticX = 0.0
    kineticY = 0.0
    kineticZ = 0.0
    totalEnergy = 0.0

    for i in range(len(x)):
        velocityX += x[i] * dT
        velocityY += y[i] * dT
        velocityZ += z[i] * dT

        kineticX += 0.5 * weight * velocityX * velocityX * dT
        kineticY += 0.5 * weight * velocityY * velocityY * dT
        kineticZ += 0.5 * weight * velocityZ * velocityZ * dT

        totalEnergy += kineticX + kineticY + kineticZ
        if i < t.size - 1:
            dT = (t[i + 1] - t[i]) / 1000.0

    return totalEnergy / totaltime
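Read as math, the two kinetic-energy helpers above perform the same two accumulations (my reading of the loops, with \Delta T_i the inter-sample time converted to seconds and a fixed assumed body mass m = 60):

v_i = \sum_{j \le i} a_j \, \Delta T_j, \qquad E = \sum_i \tfrac{1}{2} \, m \, v_i^2 \, \Delta T_i

calcMeanKineticEnergy returns E for a single axis. calcTotalKineticEnergy adds the running per-axis energies at every step (a sum of partial sums over X, Y and Z) and divides the result by the window duration.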
def calcAcAbsoluteArea(x, y, z):
    """
    Calculates a vector with the sums of absolute values for the given sensor
    :param x: x component (band-pass filtered)
    :param y: y component (band-pass filtered)
    :param z: z component (band-pass filtered)
    :return: [sumX, sumY, sumZ]
    """
    return np.array([np.sum(np.absolute(x)), np.sum(np.absolute(y)), np.sum(np.absolute(z))])


def calcAbsoluteMean(data):
    """
    Calculates the mean of the absolute values
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: mean sum(|data|)/N
    """
    return np.sum(np.absolute(data)) / len(data)


def calcAmplitude(data):
    """
    Calculates the amplitude (max minus min) for the given vector component
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: amplitude
    """
    return np.max(data) - np.min(data)


def calcCoefficientOfVariation(data):
    """
    Calculates the coefficient of variation for the given vector component
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: coefficient of variation, in percent
    """
    s = np.sum(np.absolute(data))
    if s == 0:
        return np.NaN
    return np.sqrt(calcVariance(data)) / s * 100


def calcCorrelation(a, b):
    """
    Calculates Pearson's correlation between sensor axes
    :param a: first component (band-pass filtered)
    :param b: second component (band-pass filtered)
    :return: correlation between a and b
    """
    selfCovarianceA = covariance(a, a)
    selfCovarianceB = covariance(b, b)
    s = np.sqrt(selfCovarianceA * selfCovarianceB)
    if s == 0:
        return np.NaN
    return covariance(a, b) / s


def calcEntropy(data):
    """
    Calculates the degree of disorder (spectral entropy of the FFT magnitudes)
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: entropy for the selected axis
    """
    acc = 0
    for d in normalize(fftMagnitude(fft(data))):
        if d == 0:
            return np.NaN
        acc += d * np.log(d) / np.log(2.0)

    return -acc


def calcInterQuartileRange(qData):
    """
    Calculates the interquartile range
    :param qData: quartiles vector [Q1, Q2, Q3]
    :return: range Q3 - Q1 for the selected axis
    """
    return qData[2] - qData[0]


def calcKurtosis(data):
    """
    Calculates the kurtosis of the given vector
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: kurtosis of the vector
    """
    mean = calcAbsoluteMean(data)

    acc = 0
    for d in data:
        acc += np.power(d - mean, 4.0)
    pow4 = acc

    acc = 0
    for d in data:
        acc += np.power(d - mean, 2.0)
    pow2 = acc
    if pow2 == 0:
        return np.NaN
    return len(data) * pow4 / np.square(pow2) - 3


def calcMeanCrossingRate(data):
    """
    Calculates the number of crossings of the signal with its absolute mean
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: number of mean crossings
    """
    mean = np.sum(np.abs(data)) / len(data)

    crossings = 0
    last = data[0] - mean

    for i in range(len(data)):
        current = data[i] - mean
        if last * current < 0:
            crossings += 1
        last = current

    return crossings


def calcQuartiles(data):
    """
    Quartiles at 25%, 50% and 75% of the signal
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: [accQ25, accQ50, accQ75]
    """
    sorted1 = sorted(data)
    size = len(data)
    return np.array([sorted1[int(size / 4)], sorted1[int(size / 2)], sorted1[int(size * 3 / 4)]])


def calcSkewness(data):
    """
    Calculates the skewness of the given vector
    :param data: data from accelerometer for the selected axis (band-pass filtered)
    :return: skewness of the vector
    """
    mean = calcAbsoluteMean(data)

    acc = 0
    for d in data:
        acc += np.power(d - mean, 3.0)
    pow3 = acc

    acc = 0
    for d in data:
        acc += np.power(d - mean, 2.0)
    pow2 = acc
    if pow2 == 0:
        return np.NaN
    return np.sqrt(float(len(data))) * pow3 / np.power(pow2, 1.5)
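One wrinkle worth noting: both estimators center the data on calcAbsoluteMean, i.e. the mean of |d|, not the ordinary mean. With \mu = \frac{1}{N}\sum_i |d_i|, the two return values correspond to

\text{kurtosis} = \frac{N \sum_i (d_i - \mu)^4}{\left( \sum_i (d_i - \mu)^2 \right)^2} - 3, \qquad \text{skewness} = \frac{\sqrt{N} \sum_i (d_i - \mu)^3}{\left( \sum_i (d_i - \mu)^2 \right)^{3/2}}

which are the standard moment estimators apart from that choice of center.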
||||
|
||||
|
||||
#
|
||||
def calcTotalAbsoluteArea(area):
|
||||
"""
|
||||
Calculates sum of component areas for selected sensor (@see calcAcAbsoluteArea)
|
||||
:param area: [sumX, sumY, sumZ] for given device (band-pass filtered) (@see calcAcAbsoluteArea)
|
||||
:return: sum of component sums
|
||||
"""
|
||||
return np.sum(area)
|
||||
|
||||
|
||||
def calcTotalEnergy(data):
|
||||
"""
|
||||
Calculates total magnitude of AC signal for the given sensor
|
||||
:param data: data from accelerometer for selected axis (band-pass filtered)
|
||||
:return: total energy for selected axis
|
||||
"""
|
||||
fftMagnitudeTmp = fftMagnitude(fft(data))
|
||||
|
||||
return np.sum(np.square(fftMagnitudeTmp[1:])) / len(fftMagnitudeTmp)
|
||||
|
||||
|
||||
def calcDominantFrequencyEnergy(data):
|
||||
"""
|
||||
Calculates ratio of energy in dominant frequency
|
||||
:param data: data from accelerometer for selected axis (band-pass filtered)
|
||||
:return: energy ratio for selected axis
|
||||
"""
|
||||
fftMagnitudeTmp = fftMagnitude(fft(data))
|
||||
sortedTmp = sorted(fftMagnitudeTmp)
|
||||
s = np.sum(sortedTmp)
|
||||
if s == 0:
|
||||
return np.NaN
|
||||
return sortedTmp[-1] / s
|
||||
|
||||
|
||||
def calcTotalMagnitude(x, y, z):
|
||||
"""
|
||||
Calculates total magnitude of AC signal for the given sensor
|
||||
:param x: x component (band-pass filtered)
|
||||
:param y: y component (band-pass filtered)
|
||||
:param z: z component (band-pass filtered)
|
||||
:return: sqrt(sum(x^2+y^2+z^2))
|
||||
"""
|
||||
return np.sqrt(magnitudes(x) + magnitudes(y) + magnitudes(z))
|
||||
|
||||
|
||||
def calcVariance(data):
|
||||
"""
|
||||
Calculates variance for given vector
|
||||
:param data: data from accelerometer for selected axis (band-pass filtered)
|
||||
:return: variance
|
||||
"""
|
||||
acc = 0
|
||||
for d in data:
|
||||
acc = acc + np.square(d - calcAbsoluteMean(data)) / len(data)
|
||||
return acc
|
||||
|
||||
|
||||
def calcArea(data):
|
||||
"""
|
||||
Calculates sum of component
|
||||
:param data: data from accelerometer for selected axis (low-pass filtered)
|
||||
:return: area
|
||||
"""
|
||||
return np.sum(data)
|
||||
|
||||
|
||||
def calcMean(data):
|
||||
"""
|
||||
Calculates mean value for a given vector
|
||||
:param data: data from accelerometer for selected axis (low-pass filtered)
|
||||
:return: mean (sum)/N
|
||||
"""
|
||||
return np.sum(data) / len(data)
|
||||
|
||||
|
||||
def calcPostureDistance(meanX, meanY, meanZ):
|
||||
"""
|
||||
Calculates difference between mean values for a given sensor (low-pass filtered)
|
||||
:param meanX: mean for X components
|
||||
:param meanY: mean for Y components
|
||||
:param meanZ: mean for Z components
|
||||
:return: [X-Y, X-Z, Y-Z]
|
||||
"""
|
||||
return np.array([meanX - meanY, meanX - meanZ, meanY - meanZ])
|
||||
|
||||
|
||||
def calcTotalMean(sumPhone, sumBand):
|
||||
"""
|
||||
Calculates mean of all sensors (low-pass filtered)
|
||||
:param sumPhone: meanX + meanY + meanZ
|
||||
:param sumBand: meanX + meanY + meanZ
|
||||
:return: mean of all means
|
||||
"""
|
||||
return (sumPhone.sum() + sumBand.sum()) / 6
|
||||
|
||||
|
||||
def calcPeakCount(magnitudes):
|
||||
"""
|
||||
Calculates PeakCount feature
|
||||
:param magnitudes: vector of magnitudes
|
||||
:return: [numberOfPeaks, sumOfPeakValues, peakAvg, amplitudeAvg]
|
||||
"""
|
||||
previous = 0.0
|
||||
eExpenditurePeaks = 0.0
|
||||
eExpenditureAmplitude = 0.0
|
||||
|
||||
state = np.zeros(0)
|
||||
peaks = np.zeros(0)
|
||||
low = -1.0
|
||||
|
||||
for currentmagnitude in magnitudes:
|
||||
if currentmagnitude > previous:
|
||||
state = np.append(state, True)
|
||||
else:
|
||||
state = np.append(state, False)
|
||||
|
||||
if len(state) > 2:
|
||||
state = np.delete(state, 0)
|
||||
if state[0] and not state[1]:
|
||||
if low != -1.0:
|
||||
eExpenditureAmplitude = previous - low
|
||||
else:
|
||||
low = previous
|
||||
if previous - low > 1.0:
|
||||
peaks = np.append(peaks, currentmagnitude)
|
||||
eExpenditurePeaks += previous
|
||||
|
||||
if not state[0] and state[1]:
|
||||
low = previous
|
||||
|
||||
previous = currentmagnitude
|
||||
|
||||
peaksReturn0 = len(peaks)
|
||||
peaksReturn1 = eExpenditurePeaks
|
||||
peaksReturn2 = 0.0
|
||||
peaksReturn3 = 0.0
|
||||
if len(peaks) > 0:
|
||||
peaksReturn2 = eExpenditurePeaks / len(peaks)
|
||||
peaksReturn3 = eExpenditureAmplitude / len(peaks)
|
||||
|
||||
return np.array([float(peaksReturn0), peaksReturn1, peaksReturn2, peaksReturn3])
|
||||
|
||||
|
||||
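# A rough illustration of the peak bookkeeping above (values invented):
#
#   mags = magnitudeVector(x, y, z)               # per-sample magnitudes
#   nPeaks, peakSum, peakAvg, amplAvg = calcPeakCount(mags)
#
# Note the hard-coded "previous - low > 1.0" amplitude threshold: rises
# shallower than 1.0 (in sensor units) are not counted as peaks.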
def calcSumPerComponent(data, time):
    """
    Calculates the sumPerComponent feature

    :param data: data from accelerometer for selected axis
    :param time: array of timestamps
    :return: sum of Xi * dT
    """
    calc = 0.0
    dT = 1.0

    for i in range(len(data)):
        calc += np.abs(data[i]) * dT
        if i < len(data) - 1:
            dT = (time[i + 1] - time[i]) / 1000.0

    return calc


def computeACVelocity(data, time):
    """
    Calculates velocity feature

    :param data: data from accelerometer for selected axis (band-pass filtered)
    :param time: array of timestamps
    :return: velocity for selected axis
    """
    calc = 0.0
    dT = 1.0

    for i in range(len(data)):
        calc += data[i] * dT
        if i < data.size - 1:
            dT = (time[i + 1] - time[i]) / 1000.0

    return calc


def lowPassFilter(input, alpha=0.2):
    """ Low-pass filter implemented in discrete time

    :param input: input signal to be filtered
    :param alpha: smoothing factor
    :return: filtered signal
    """
    output = np.zeros(input.size)
    output[0] = input[0]
    for i in range(1, output.size):
        output[i] = output[i - 1] + alpha * (input[i] - output[i - 1])
    return output


def bandPassFilter(input, alphaLpf=0.2, alphaHpf=0.6):
    """ Band-pass filter implemented in discrete time

    :param input: input signal to be filtered
    :param alphaLpf: smoothing factor for the LPF
    :param alphaHpf: smoothing factor for the HPF
    :return: filtered signal
    """
    output = lowPassFilter(input, alphaLpf)
    output = highPassFilter(output, alphaHpf)
    return output


def highPassFilter(input, alpha=0.6):
    """ High-pass filter implemented in discrete time

    :param input: input signal to be filtered
    :param alpha: smoothing factor
    :return: filtered signal
    """
    output = np.zeros(input.size)
    output[0] = input[0]
    for i in range(1, output.size):
        output[i] = alpha * (output[i - 1] + input[i] - input[i - 1])
    return output
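# A minimal sketch of the filter chain (assumes a 1-D numpy signal; the
# alpha values are this module's defaults, not tuned for any sensor):
#
#   raw = np.array([...])        # raw accelerometer axis
#   low = lowPassFilter(raw)     # gravity / posture component
#   band = bandPassFilter(raw)   # body-movement (AC) component
#
# bandPassFilter is literally highPassFilter(lowPassFilter(x)), so the two
# single-stage filters above are the only building blocks.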
def magnitudeVector(x, y, z):
    """ Calculates per-sample magnitudes of vectors x, y and z

    :param x: x axis
    :param y: y axis
    :param z: z axis
    :return: numpy array of magnitudes
    """
    acc = np.zeros(x.size)
    for i in range(acc.size):
        acc[i] = np.sqrt(np.square(x[i]) + np.square(y[i]) + np.square(z[i]))
    return acc


def sum(data):
    """ Sums up the given array

    :param data: array to sum up
    :return: summed value
    """
    return np.sum(data)


def absoluteSum(data):
    """ Sums up the absolute values of the given array

    :param data: array to sum up
    :return: summed value
    """
    return np.sum(np.abs(data))


def fft(data):
    """ Performs a fast Fourier transform on the given array

    :param data: the array on which the FFT should be performed
    :return: array of interleaved real and imaginary FFT components
    """
    # np.fft.fft truncates (or zero-pads) its input to length n itself, so the
    # original copy into a half-filled, None-padded list of twice the length
    # was unnecessary and breaks the dtype conversion on recent numpy versions.
    tmpArray = np.fft.fft(data, len(data))

    tmpArray2 = []
    for t in tmpArray:
        tmpArray2.append(t.real)
        tmpArray2.append(t.imag)

    if len(data) % 2 == 0:
        ret = np.zeros(len(data))
    else:
        ret = np.zeros(len(data) + 1)

    for i, _ in enumerate(ret):
        ret[i] = tmpArray2[i]
    return ret


def fftMagnitude(data):
    """ Computes bin magnitudes from an interleaved [re0, im0, re1, im1, ...] FFT array (@see fft)

    :param data: interleaved real and imaginary FFT components
    :return: array of len(data) / 2 magnitudes
    """
    ret = np.zeros(int(len(data) / 2))
    for i, _ in enumerate(ret):
        ret[i] = np.sqrt(np.square(data[2 * i]) + np.square(data[2 * i + 1]))
    return ret
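# How these helpers compose in the energy features above (sketch only):
#
#   spectrum = fftMagnitude(fft(signal))                  # one magnitude per frequency bin
#   totalEnergy = np.sum(np.square(spectrum[1:])) / len(spectrum)
#   dominantRatio = np.max(spectrum) / np.sum(spectrum)   # cf. calcDominantFrequencyEnergy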
def normalize(data):
    """ Normalizes the given array so that it sums to 1

    :param data: the array to normalize
    :return: normalized array
    """
    ret = np.zeros(len(data))
    sum = np.sum(data)
    for i, _ in enumerate(data):
        if sum == 0:
            ret[i] = np.NaN
        else:
            ret[i] = data[i] / sum
    return ret


def covariance(array1, array2):
    """
    Covariance between two arrays

    :param array1: first array of values
    :param array2: second array of values
    :return: covariance(array1, array2)
    """
    cov = 0.0

    # Centre each array on its absolute mean; the original summed the absolute
    # values without dividing by the length, which made m1 and m2 sums rather
    # than the means the formula below expects.
    m1 = np.sum(np.abs(array1)) / len(array1)
    m2 = np.sum(np.abs(array2)) / len(array2)

    for i, _ in enumerate(array1):
        cov += (array1[i] - m1) * (array2[i] - m2)

    return cov


def magnitudes(data):
    """ Calculates the sum of squares of the given array

    :param data: given array
    :return: sum of squares
    """
    acc = 0
    for d in data:
        acc += np.square(d)
    return acc


def calcRoll(arrayAy, arrayAz):
    """ Calculates the roll value from the y and z axes

    :param arrayAy: array of y values
    :param arrayAz: array of z values
    :return: array of calculated roll values
    """
    roll = np.zeros(arrayAy.size)

    for i, _ in enumerate(arrayAy):
        roll[i] = np.arctan2(arrayAz[i], arrayAy[i])

    return roll


def calcPitch(arrayAx, arrayAy, arrayAz):
    """ Calculates the pitch

    :param arrayAx: array of x values
    :param arrayAy: array of y values
    :param arrayAz: array of z values
    :return: array of calculated pitch values
    """
    pitch = np.zeros(arrayAx.size)

    for i, _ in enumerate(arrayAy):
        # pitch = atan2(-ax, sqrt(ay^2 + az^2)); the original omitted the
        # square root around the y/z term.
        pitch[i] = np.arctan2(-arrayAx[i], np.sqrt(arrayAy[i] * arrayAy[i] + arrayAz[i] * arrayAz[i]))

    return pitch


def stdDev(arrayX):
    """ Calculates the standard deviation of the given array

    :param arrayX: the array on which to calculate the standard deviation
    :return: standard deviation of the given array
    """
    std = 0.0

    mean = calcMean(arrayX)
    for cnt, _ in enumerate(arrayX):
        std += (arrayX[cnt] - mean) * (arrayX[cnt] - mean)

    return np.sqrt(std / arrayX.size)


def rollMotionAmount(roll):
    """
    Amount of wrist roll motion, i.e. the mean absolute deviation of the roll from its mean

    Improving the Recognition of Eating Gestures Using Intergesture Sequential Dependencies [R.I. Ramos-Garcia]
    """
    meanRoll = calcMean(roll)
    rollMot_mean = np.zeros(roll.size)

    for i in range(roll.size):
        rollMot_mean[i] = np.abs(roll[i] - meanRoll)  # - gravity;

    return calcMean(rollMot_mean)


def rollMotionRegularity(roll):
    """
    Regularity of wrist roll motion,
    represents the percentage of time that the wrist is in roll motion

    Improving the Recognition of Eating Gestures Using Intergesture Sequential Dependencies [R.I. Ramos-Garcia]
    """
    rollBoundary = 10 * (np.pi / 180)
    instance_number = 0.0

    for i in range(roll.size):
        if np.abs(roll[i]) > rollBoundary:
            instance_number += 1.0

    return instance_number / roll.size


def manipulation(axLow, ayLow, azLow, roll, pitch):
    """ Calculates the manipulation feature

    :param axLow: low-pass filtered accelerometer x axis
    :param ayLow: low-pass filtered accelerometer y axis
    :param azLow: low-pass filtered accelerometer z axis
    :param roll: roll values
    :param pitch: pitch values
    :return: mean manipulation value
    """
    man_velocity = np.zeros(roll.size)

    for i in range(roll.size):
        man_velocity[i] = \
            (np.abs(roll[i]) + np.abs(pitch[i])) / (np.abs(axLow[i]) + np.abs(ayLow[i]) + np.abs(azLow[i]))
    return calcMean(man_velocity)


def min(list):
    """ Returns the minimum value of the given list.

    :param list: the given list
    :return: the minimum value of the list
    """
    return np.min(list)


def max(list):
    """ Returns the maximum value of the given list.

    :param list: the given list
    :return: the maximum value of the list
    """
    return np.max(list)


def avg(list):
    """ Returns the average value of the given list.

    :param list: the given list
    :return: the average value of the list
    """
    return np.average(list)


def countAboveMean(signal):
    """ Returns the number of values in the signal that are higher than its mean

    :param signal: the time series to calculate the feature of
    :return: the value of this feature
    """
    mean = np.mean(signal)
    return np.where(signal > mean)[0].size


def countBelowMean(signal):
    """ Returns the number of values in the signal that are lower than its mean

    :param signal: the time series to calculate the feature of
    :return: the value of this feature
    """
    mean = np.mean(signal)
    return np.where(signal < mean)[0].size


def meanAbsChange(signal):
    """ Returns the mean of the absolute differences between subsequent time series values

    :param signal: input signal
    :return: mean of absolute differences
    """
    return np.mean(np.abs(np.diff(signal)))


def autocorrelation(signal, lag):
    """ Computes the lag-N autocorrelation.

    This method computes the Pearson correlation between
    the series and its shifted self.

    :param signal: the signal to perform the autocorrelation on
    :param lag: number of lags to apply before performing the autocorrelation
    :return: lag-N autocorrelation
    """
    signal = pd.Series(signal)
    return signal.autocorr(lag)


def autocorrelations(signal):
    """ Computes autocorrelations for each lag from 0 to int(len(signal) * 0.7)

    :param signal: input signal on which to calculate autocorrelations
    :return: array of autocorrelations
    """
    nlags = int(len(signal) * 0.7)
    autocorrs = np.empty(nlags)
    for lag in range(nlags):
        autocorrs[lag] = autocorrelation(signal, lag)
    return autocorrs
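# Sketch: the first few autocorrelation coefficients of a short series
# (pandas is assumed to be imported as pd at module level):
#
#   s = np.array([1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
#   autocorrelation(s, 1)   # strongly negative for this alternating series
#   autocorrelations(s)     # lags 0 .. int(len(s) * 0.7) - 1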
def _calcMaxLengthOfSequenceTrueOrOne(signal):
    """ Returns the length of the longest run of ones (or True values) in the signal """
    if len(signal) == 0:
        return 0
    else:
        res = [len(list(group)) for value, group in itertools.groupby(signal) if value == 1]
        return max(res) if len(res) > 0 else 0


def meanChange(x):
    """ Returns the mean over the differences between subsequent time series values

    :param x: the time series to calculate the feature of
    :return: the value of this feature
    """
    x = np.asarray(x)
    # (x[-1] - x[0]) / (len(x) - 1) is the telescoped mean of np.diff(x)
    return (x[-1] - x[0]) / (len(x) - 1) if len(x) > 1 else np.NaN


def numberOfZeroCrossings(x):
    """ Calculates the number of crossings of x on 0. A crossing is defined as two sequential values where the first
    value is lower than 0 and the next is greater, or vice versa.

    :param x: the time series to calculate the feature of
    :return: the value of this feature
    """
    x = np.asarray(x)
    positive = x > 0
    return np.where(np.diff(positive))[0].size


def ratioBeyondRSigma(x, r):
    """ Ratio of values that are more than r * std(x) (so r sigma) away from the mean of x.

    :param x: the time series to calculate the feature of
    :param r: the number of standard deviations to compare with
    :return: the value of this feature
    """
    return np.sum(np.abs(x - np.mean(x)) > r * np.std(x)) / x.size


def binnedEntropy(x, max_bins):
    """
    First bins the values of x into max_bins equidistant bins.
    Then calculates the value of

    .. math::

        - \\sum_{k=0}^{min(max\\_bins, len(x))} p_k log(p_k) \\cdot \\mathbf{1}_{(p_k > 0)}

    where :math:`p_k` is the percentage of samples in bin :math:`k`.

    :param x: the time series to calculate the feature of
    :type x: numpy.ndarray
    :param max_bins: the maximal number of bins
    :type max_bins: int
    :return: the value of this feature
    :return type: float
    """
    if not isinstance(x, (np.ndarray, pd.Series)):
        x = np.asarray(x)

    # NaN makes no sense here
    if np.isnan(x).any():
        return np.nan

    hist, bin_edges = np.histogram(x, bins=max_bins)
    probs = hist / x.size
    # empty bins contribute 0 to the entropy; log(1) == 0 keeps them out
    probs[probs == 0] = 1.0
    return -np.sum(probs * np.log(probs))
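# Worked micro-example (made-up numbers): with x = [0, 0, 1, 1] and
# max_bins = 2, the histogram is [2, 2], probs = [0.5, 0.5], and the
# entropy is -2 * 0.5 * log(0.5) = log(2) ~= 0.693:
#
#   binnedEntropy(np.array([0.0, 0.0, 1.0, 1.0]), 2)   # -> ~0.693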
def absEnergy(x):
    """
    Returns the absolute energy of the time series, which is the sum over the squared values

    .. math::

        E = \\sum_{i=1,\\ldots, n} x_i^2

    :param x: the time series to calculate the feature of
    :type x: numpy.ndarray
    :return: the value of this feature
    :return type: float
    """
    if not isinstance(x, (np.ndarray, pd.Series)):
        x = np.asarray(x)
    return np.dot(x, x)


def linearTrendSlope(x):
    """
    Calculates a linear least-squares regression of the time series values against the sequence from 0 to
    length of the time series minus one and returns its slope.
    This feature assumes the signal to be uniformly sampled. It does not use the timestamps to fit the model.

    :param x: the time series to calculate the feature of
    :return: slope of the fitted model
    """
    slope, intercept, r_value, p_value, std_err = linregress(range(len(x)), x)

    return slope
@ -1,669 +0,0 @@
import numpy as np
import pandas as pd
import peakutils
import matplotlib.pyplot as plt
import scipy.signal as signal
import biosppy.signals.tools as st
from eda_explorer.load_files import butter_lowpass_filter
from eda_explorer.EDA_Peak_Detection_Script import calcPeakFeatures
from CalculatingFeatures.helper_functions import checkForFeature


def extractGsrFeatures(signal, startTimestampSeconds=0, sampleRate=4, threshold=.02, offset=1, riseTime=4, decayTime=4,
                       featureNames=None):
    """ Extracts Martin's GSR features with eda-explorer peak detection

    :param signal: numpy array containing the signal
    :param startTimestampSeconds: seconds from epoch when the signal started
    :param sampleRate: sampling rate of the input signal
    :param threshold: threshold for detected peaks
    :param offset:
    :param riseTime: rise time of detected peaks
    :param decayTime: decay time of detected peaks
    :param featureNames: optional list of feature names to calculate; None calculates all features
    :return: calculated GSR features
    """
    filteredSignal = butter_lowpass_filter(signal, 1.0, sampleRate, 6)

    gsr_data = pd.DataFrame(signal, columns=["EDA"])

    startTime = pd.to_datetime(startTimestampSeconds, unit="s")
    gsr_data.index = pd.date_range(start=startTime, periods=len(gsr_data), freq=str(1000 / sampleRate) + 'L')

    # Filter the signal
    gsr_data['filtered_eda'] = filteredSignal
    # Calculate peak data with eda-explorer
    peakData = calcPeakFeatures(gsr_data, offset, threshold,
                                riseTime, decayTime, sampleRate)

    peaks = np.where(peakData.peaks == 1.0)[0]

    if np.any(signal):
        tonic = peakutils.baseline(signal, 10)
    else:
        tonic = signal

    # Calculate features with Martin's library
    feats = calculate_GSR_features(signal, peaks, tonic, sampleRate, featureNames=featureNames)
    freq_feats = GSR_freq(signal, sampleRate, False, print_flag=False, featureNames=featureNames)
    peaks, ends, starts = get_peak_intervals(signal, peaks, sampleRate, False)
    peak_features = get_peak_intervals_features(signal, peaks, starts, ends, sampleRate, featureNames=featureNames)
    significant_change_features = significant_change(signal, sampleRate, False, False, featureNames=featureNames)

    return {**feats, **freq_feats, **peak_features, **significant_change_features}
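# A minimal usage sketch (assumptions: a 4 Hz Empatica EDA channel loaded
# with convert1DEmpaticaToArray from helper_functions; the values below are
# illustrative, not defaults mandated by this module):
#
#   data, startTs, fs = convert1DEmpaticaToArray("EDA.csv")
#   feats = extractGsrFeatures(data, startTimestampSeconds=startTs,
#                              sampleRate=int(fs),
#                              featureNames=['mean', 'numPeaks', 'freqFeats'])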
def extractGsrFeatures2D(signal2D, startTimestampSeconds=0, sampleRate=4, threshold=.02, offset=1, riseTime=4,
                         decayTime=4):
    """ Extracts Martin's GSR features with eda-explorer peak detection

    :param signal2D: 2-dimensional numpy array containing the signal (each row is processed separately)
    :param startTimestampSeconds: seconds from epoch when the signal started
    :param sampleRate: sampling rate of the input signal
    :param threshold: threshold for detected peaks
    :param offset:
    :param riseTime: rise time of detected peaks
    :param decayTime: decay time of detected peaks
    :return: pandas dataframe of calculated GSR features, each row corresponds with each input row
    """
    data = pd.DataFrame()

    for signal in signal2D:
        features = extractGsrFeatures(signal, startTimestampSeconds, sampleRate, threshold, offset, riseTime, decayTime)
        # DataFrame.append was removed in pandas 2.0; concat is the equivalent
        data = pd.concat([data, pd.DataFrame([features])], ignore_index=True)

    return data


def filter_FIR(signal, sampling_rate, plt_flag=True):
    filtered = st.filter_signal(signal=signal,
                                ftype="FIR",
                                band="bandpass",
                                frequency=(0.01, 1),
                                order=20,
                                sampling_rate=sampling_rate)

    signal_f = filtered['signal']
    if plt_flag:
        plt.plot(signal, label='raw', c="blue")
        plt.plot(signal_f, label='filtered', c="red")
        plt.xlabel("Sample")
        plt.ylabel("GSR value")
        plt.legend()
        plt.show()

    return signal_f


def find_peaks(signal, sampling_rate, plt_flag=True):
    tonic = peakutils.baseline(signal, 10)
    signal_bf = signal - tonic
    indexes = peakutils.indexes(signal_bf, thres=0.3, min_dist=sampling_rate)
    if plt_flag:
        plt.figure(figsize=(30, 3))
        plt.plot(signal_bf, alpha=0.5, color='blue')
        plt.scatter(indexes, signal_bf[indexes], color='red')  # plot detected peaks
        plt.title("GSR with removed tonic")
        plt.show()
        plt.figure(figsize=(30, 3))
        plt.plot(signal, alpha=0.5, color='blue', label="GSR signal")
        plt.scatter(indexes, signal[indexes], color='red')  # plot detected peaks
        plt.plot(tonic, alpha=0.5, color='green', label="GSR tonic driver")
        plt.legend()
        plt.show()
    return indexes, tonic


def find_peaks_heght_filter(signal, sampling_rate, height_threshold=.1, plt_flag=True):
    tonic = peakutils.baseline(signal, 10)
    signal_bf = signal - tonic
    indexes = peakutils.indexes(signal_bf, thres=0.1, min_dist=sampling_rate)

    all_indexes = np.copy(indexes)
    bad_indexes = []

    good_heights = np.argwhere(signal_bf[indexes] > height_threshold)
    bad_heights = np.argwhere(signal_bf[indexes] <= height_threshold)
    if len(good_heights) > 0:
        indexes = np.concatenate(indexes[good_heights])
    else:
        indexes = []  # all peaks are below the height threshold
    if len(bad_heights) > 0:
        bad_indexes = np.concatenate(all_indexes[bad_heights])
    # print(signal_bf[indexes])

    if plt_flag:
        plt.figure(figsize=(30, 3))
        plt.plot(signal_bf, alpha=0.5, color='blue', label='GSR-tonic')
        plt.scatter(indexes, signal_bf[indexes], color='red')  # plot detected peaks
        plt.legend()
        plt.show()
        plt.figure(figsize=(30, 3))
        plt.plot(signal, alpha=0.5, color='blue', label="GSR signal")
        plt.scatter(indexes, signal[indexes], color='red', label='Good detected peaks')
        plt.scatter(bad_indexes, signal[bad_indexes], color='purple', label='Bad detected peaks')
        plt.plot(tonic, alpha=0.5, color='green', label="GSR tonic driver")
        plt.legend()
        plt.show()

    return indexes, tonic


def find_peaks_sliding(sig, sampling_rate, height_threshold=.1, plt_flag=True):
    window_size = 60 * sampling_rate
    window_count = 1

    # detrending using a sliding window, for signals in which the trend is not linear
    signal_bf = np.copy(sig)
    tonic_sliding = []
    while (window_count * window_size) <= len(sig):
        start = (window_count - 1) * window_size
        end = window_count * window_size
        if (len(signal_bf) - end) < window_size:
            end = end + window_size
        tonic_sliding.extend(peakutils.baseline(sig[start:end], 3))
        window_count = window_count + 1
    sig_df = pd.DataFrame(tonic_sliding)
    tonic_sliding = sig_df.iloc[:, 0].rolling(window=(3 * sampling_rate), center=True).mean().values
    tonic_sliding[np.isnan(tonic_sliding)] = np.reshape(sig_df[np.isnan(tonic_sliding)].values,
                                                        len(sig_df[np.isnan(tonic_sliding)].values))
    tonic_sliding = np.reshape(tonic_sliding, len(tonic_sliding))

    tonic = peakutils.baseline(sig, 3)

    if len(tonic_sliding) > 0:
        signal_bf = signal_bf - tonic_sliding
    else:
        signal_bf = signal_bf - tonic
    indexes = peakutils.indexes(signal_bf, thres=0.3, min_dist=sampling_rate)
    all_indexes = np.copy(indexes)
    bad_indexes = []
    good_heights = np.argwhere(signal_bf[indexes] > height_threshold)
    bad_heights = np.argwhere(signal_bf[indexes] <= height_threshold)
    if len(good_heights) > 0:
        indexes = np.concatenate(indexes[good_heights])
    if len(bad_heights) > 0:
        bad_indexes = np.concatenate(all_indexes[bad_heights])

    if plt_flag:
        plt.figure(figsize=(30, 3))
        plt.plot(signal_bf, alpha=0.5, color='blue')
        plt.scatter(indexes, signal_bf[indexes], color='red')  # plot detected peaks
        plt.title("GSR with removed tonic")
        plt.show()
        plt.figure(figsize=(30, 3))
        plt.plot(sig, alpha=0.5, color='blue', label="GSR signal")
        plt.scatter(indexes, sig[indexes], color='red')
        plt.scatter(bad_indexes, sig[bad_indexes], color='yellow')
        plt.plot(tonic, alpha=0.5, color='green', label="GSR tonic driver")
        plt.plot(tonic_sliding, alpha=0.5, color='purple',
                 label="GSR tonic driver - sliding")
        plt.legend()
        plt.show()
    return indexes, tonic
def calculate_GSR_features(signal, peaks, tonic, sampling_rate, featureNames=None):
    # np.percentile expects percentages in [0, 100]; the original passed 0.25
    # and 0.75, which yield the 0.25th and 0.75th percentiles instead of the
    # first and third quartiles.
    q25 = np.percentile(signal, 25)
    q75 = np.percentile(signal, 75)
    derivative = np.gradient(signal)
    pos_idx = np.where(derivative > 0)[0]

    out = {}
    if checkForFeature('mean', featureNames):
        out['mean'] = np.mean(signal)

    if checkForFeature('std', featureNames):
        out['std'] = np.std(signal)

    if checkForFeature('q25', featureNames):
        out['q25'] = q25

    if checkForFeature('q75', featureNames):
        out['q75'] = q75

    if checkForFeature('qd', featureNames):
        out['qd'] = q75 - q25

    if checkForFeature('deriv', featureNames):
        out['deriv'] = np.sum(derivative)

    if checkForFeature('power', featureNames):
        out['power'] = np.mean(signal * signal)

    if checkForFeature('numPeaks', featureNames):
        out['numPeaks'] = len(peaks)

    if checkForFeature('ratePeaks', featureNames):
        out['ratePeaks'] = len(peaks) / (len(signal) / sampling_rate)

    if checkForFeature('powerPeaks', featureNames):
        if len(signal[peaks]) == 0:
            out['powerPeaks'] = np.nan
        else:
            out['powerPeaks'] = np.mean(signal[peaks])

    if checkForFeature('sumPosDeriv', featureNames):
        out['sumPosDeriv'] = np.sum(derivative[pos_idx]) / len(derivative)

    if checkForFeature('propPosDeriv', featureNames):
        out['propPosDeriv'] = len(pos_idx) / len(derivative)

    if checkForFeature('derivTonic', featureNames):
        out['derivTonic'] = np.sum(np.gradient(tonic))

    if checkForFeature('sigTonicDifference', featureNames):
        out['sigTonicDifference'] = np.mean(signal - tonic)

    return out
def get_GSR_features(signal, sampling_rate, height_threshold=.1, plt_flag=True):
    # signal_f = filter_FIR(signal, sampling_rate, plt_flag)
    # signal_f = mean_filter(signal, 3 * sampling_rate, 1, sampling_rate, plt_flag)
    signal_f = signal
    peaks, tonic = find_peaks_heght_filter(signal_f, sampling_rate, height_threshold, plt_flag)

    feats = calculate_GSR_features(signal_f, peaks, tonic, sampling_rate)
    freq_feats = GSR_freq(signal_f, sampling_rate, plt_flag, print_flag=plt_flag)

    peaks, ends, starts = get_peak_intervals(signal_f, peaks, sampling_rate, plt_flag)
    peak_features = get_peak_intervals_features(signal_f, peaks, starts, ends, sampling_rate)
    significant_change_features = significant_change(signal, sampling_rate, plt_flag, plt_flag)
    # print('significant_change_features', significant_change_features)

    # the individual extractors return dicts, so merge them; the original
    # np.concatenate call predates that refactor and would fail on dicts
    return {**feats, **freq_feats, **peak_features, **significant_change_features}


def get_GSR_features_old(signal, sampling_rate, plt_flag=True):
    signal_f = filter_FIR(signal, sampling_rate, plt_flag)
    peaks, tonic = find_peaks(signal_f, sampling_rate, plt_flag)
    feats = calculate_GSR_features(signal_f, peaks, tonic, sampling_rate)
    # freq_feats = GSR_freq(signal_f, sampling_rate, plt_flag, print_flag=plt_flag)

    return feats


def GSR_freq(s, fs, plot_flag, print_flag, featureNames=None):
    if not checkForFeature('freqFeats', featureNames):
        return dict()

    ff, Pxx_spec = signal.periodogram(s, fs, 'flattop', scaling='spectrum')
    if plot_flag:
        # plt.plot(s, label="Signal freq")
        # plt.legend()
        # plt.show()
        plt.semilogy(ff, Pxx_spec)
        plt.xlabel('frequency [Hz]')
        plt.ylabel('PSD [V**2/Hz]')
        plt.xlim(0, fs // 2)
        plt.show()
    # integrate the power spectrum over six 0.1 Hz bands covering 0.0-0.6 Hz
    current_f = 0.0
    increment = 0.1
    feats = []
    while current_f < 0.6:
        feat = np.trapz(abs(Pxx_spec[(ff >= current_f) & (ff <= current_f + increment)]))
        feats.append(feat)
        # if print_flag:
        #     print(current_f, "-", current_f + increment, feat)
        current_f = current_f + increment

    return dict(zip(['fp01', 'fp02', 'fp03', 'fp04', 'fp05', 'fp06'], feats))
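# Sketch of the band features (eda and the 4 Hz rate are illustrative):
#
#   feats = GSR_freq(eda, 4, False, False)
#   feats['fp01']   # power in 0.0-0.1 Hz, ... up to 'fp06' for 0.5-0.6 Hz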
def significant_increase(sig, fs, print_flag):
    # compare the signal at 5-second steps
    win_size = 5 * fs
    sig_change_threshold = 1.05  # 5%
    sig_counter = 0
    sig_duration_threshold = 15 * fs  # a 5% change must be sustained for at least 15 seconds
    sig_duration = 0
    sig_windows = []
    sig_windows_all = []

    for idx in range(len(sig) // win_size - 1):
        win_prev = sig[idx * win_size]
        win_next = sig[(idx + 1) * win_size]
        if win_prev == 0:
            win_prev = win_prev + 0.00001  # avoid division by zero
        if (win_next / win_prev) > sig_change_threshold:
            sig_counter = sig_counter + 1
            sig_windows.append(win_prev)
        else:
            if sig_counter * win_size >= sig_duration_threshold:  # for how many windows was there a significant change?
                sig_duration = sig_duration + (sig_counter * win_size)
                # if print_flag:
                #     print("Significant increase ended")
                sig_windows_all.extend(sig_windows)
            sig_counter = 0
            sig_windows = []
        # if print_flag:
        #     print(idx * win_size, (idx + 1) * win_size, win_next / win_prev)
    if sig_counter * win_size >= sig_duration_threshold:
        sig_duration = sig_duration + (sig_counter * win_size)

    # for how many samples there has been a significant increase
    mean = 0
    intensity = 0
    change = 0
    speed = 0
    if len(sig_windows_all) > 0:
        mean = np.mean(sig_windows_all)
        intensity = np.mean(sig_windows_all) * sig_duration
        change = max(sig_windows_all) - min(sig_windows_all)
        speed = change / sig_duration
    return [sig_duration, mean, intensity, change, speed]


def significant_decrease(sig, fs, print_flag):
    # compare the signal at 5-second steps
    win_size = 5 * fs
    sig_change_threshold = 1.05  # 5%
    sig_counter = 0
    sig_duration_threshold = 15 * fs  # a 5% change must be sustained for at least 15 seconds
    sig_duration = 0

    sig_windows = []
    sig_windows_all = []

    for idx in range(len(sig) // win_size - 1):
        win_prev = sig[idx * win_size]
        win_next = sig[(idx + 1) * win_size]
        if win_next == 0:
            # avoid division by zero (the original set win_next to
            # win_prev + 0.00001 here, which silently masked real decreases to zero)
            win_next = win_next + 0.00001
        if (win_prev / win_next) > sig_change_threshold:
            sig_counter = sig_counter + 1
            sig_windows.append(win_prev)
        else:
            if (sig_counter * win_size) >= sig_duration_threshold:
                sig_duration = sig_duration + (sig_counter * win_size)
                # if print_flag:
                #     print("Significant decrease ended")
                sig_windows_all.extend(sig_windows)
            sig_counter = 0
            sig_windows = []
        # if print_flag:
        #     print(idx * win_size, (idx + 1) * win_size, win_prev / win_next)
    if sig_counter * win_size >= sig_duration_threshold:
        sig_duration = sig_duration + (sig_counter * win_size)

    # for how many samples there has been a significant decrease
    mean = 0
    intensity = 0
    change = 0
    speed = 0
    if len(sig_windows_all) > 0:
        mean = np.mean(sig_windows_all)
        intensity = np.mean(sig_windows_all) * sig_duration
        change = min(sig_windows_all) - max(sig_windows_all)
        speed = change / sig_duration
    return [sig_duration, mean, intensity, change, speed]
def significant_change(sig, fs, plt_flag, print_flag, featureNames=None):
    out = {}

    if checkForFeature('significantIncrease', featureNames):
        a = significant_increase(sig, fs, print_flag)
        out['significantIncreaseDuration'] = a[0]
        out['significantIncreaseMean'] = a[1]
        out['significantIncreaseIntensity'] = a[2]
        out['significantIncreaseChange'] = a[3]
        out['significantIncreaseSpeed'] = a[4]

    if checkForFeature('significantDecrease', featureNames):
        b = significant_decrease(sig, fs, print_flag)
        out['significantDecreaseDuration'] = b[0]
        out['significantDecreaseMean'] = b[1]
        out['significantDecreaseIntensity'] = b[2]
        out['significantDecreaseChange'] = b[3]
        out['significantDecreaseSpeed'] = b[4]

    return out
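# Sketch of the resulting keys (assuming both feature groups are requested;
# eda and the 4 Hz rate are illustrative):
#
#   sc = significant_change(eda, 4, False, False)
#   sc['significantIncreaseDuration']   # samples spent in a sustained > 5% increase
#   sc['significantDecreaseSpeed']      # change divided by that duration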
def get_peak_intervals(sig, peak_indexes, sampling_frequency, plt_flag):
    window_size = 4
    window_slide = 1
    inertion = .01
    ends = []
    starts = []
    for start_idx in peak_indexes:
        # walk backwards from the peak while the windowed mean keeps decreasing
        mean_prev = np.mean(sig[start_idx:(start_idx + window_size)])
        window_start = start_idx - window_size
        window_end = start_idx
        mean_current = np.mean(sig[window_start:window_end])
        while window_start >= 0 and (mean_current + inertion) <= mean_prev:
            window_end = window_end - window_slide
            window_start = window_start - window_slide
            mean_prev = mean_current
            mean_current = np.mean(sig[window_start:window_end])
        if window_end < 0:
            window_end = 0
        value = window_end
        if value > start_idx:
            value = start_idx - window_size
        if value < 0:
            value = 0
        starts.append(value)

        # walk forwards from the peak while the windowed mean keeps decreasing
        mean_prev = np.mean(sig[start_idx:(start_idx + window_size)])
        window_start = start_idx + window_slide
        window_end = window_start + window_size
        mean_current = np.mean(sig[window_start:window_end])
        while window_end <= len(sig) and (mean_current + inertion) <= mean_prev:
            window_start = window_start + window_slide
            window_end = window_end + window_slide
            mean_prev = mean_current
            mean_current = np.mean(sig[window_start:window_end])
        if window_start >= len(sig):
            window_start = len(sig) - 1

        value = window_start
        if value <= start_idx:
            value = start_idx + window_size
        if value >= len(sig):
            value = len(sig) - 1

        ends.append(value)

    # # filter bad/short peaks
    # inc_duration_threshold = 1
    # dec_duration_threshold = 1
    # inc_amplitude_threshold = .1
    # dec_amplitude_threshold = .1
    good_indexes = []
    bad_indexes = []
    for i in range(len(peak_indexes)):
        good_indexes.append(i)
        # inc_duration = (peak_indexes[i] - starts[i]) / sampling_frequency
        # dec_duration = (ends[i] - peak_indexes[i]) / sampling_frequency
        # inc_amplitude = (sig[peak_indexes[i]] - sig[starts[i]])
        # dec_amplitude = (sig[peak_indexes[i]] - sig[ends[i]])
        # # print(i, inc_duration, dec_duration, inc_amplitude, dec_amplitude)
        # if (inc_duration >= inc_duration_threshold and
        #         dec_duration >= dec_duration_threshold and
        #         inc_amplitude >= inc_amplitude_threshold and
        #         dec_amplitude >= dec_amplitude_threshold):
        #     good_indexes.append(i)
        # else:
        #     bad_indexes.append(i)
    peak_indexes = np.array(peak_indexes)
    bad_peak_indexes = peak_indexes[bad_indexes]
    peak_indexes = peak_indexes[good_indexes]
    ends = np.array(ends)
    starts = np.array(starts)
    ends = ends[good_indexes]
    starts = starts[good_indexes]

    if plt_flag and len(peak_indexes) > 0:
        plt.figure(figsize=(30, 3))
        plt.plot(sig, label='GSR')
        plt.scatter(peak_indexes, sig[peak_indexes], color='red', label='Good detected peaks')
        plt.scatter(bad_peak_indexes, sig[bad_peak_indexes], color='purple',
                    label='Bad detected peaks')

        plt.scatter(ends, .001 + sig[ends], color='orange', label='Peak end')
        plt.scatter(starts, .001 + sig[starts], color='green', label='Peak start')
        plt.legend()
        plt.show()

    return peak_indexes, np.array(ends), np.array(starts)
def get_peak_intervals_features(sig, peak_indexes, starts, ends, sampling_frequency, featureNames=None):
    if len(peak_indexes) > 0:

        max_peak_idx = np.argmax(sig[peak_indexes])

        max_peak_start = starts[max_peak_idx]
        max_peak_end = ends[max_peak_idx]

        max_peak_amplitude_change_before = sig[peak_indexes[max_peak_idx]] - sig[max_peak_start]
        max_peak_amplitude_change_after = sig[peak_indexes[max_peak_idx]] - sig[max_peak_end]

        # max_peak_change_ratio = max_peak_amplitude_change_before / max_peak_amplitude_change_after

        avg_peak_amplitude_change_before = np.median(sig[peak_indexes] - sig[starts])
        avg_peak_amplitude_change_after = np.median(sig[peak_indexes] - sig[ends])

        # avg_peak_change_ratio = 0
        # if avg_peak_amplitude_change_after != 0:
        #     avg_peak_change_ratio = avg_peak_amplitude_change_before / avg_peak_amplitude_change_after

        max_peak_increase_time = (peak_indexes[max_peak_idx] - max_peak_start) / sampling_frequency
        max_peak_decrease_time = (max_peak_end - peak_indexes[max_peak_idx]) / sampling_frequency

        max_peak_duration = (max_peak_end - max_peak_start) / sampling_frequency

        max_peak_change_ratio = 0
        if max_peak_decrease_time != 0:
            max_peak_change_ratio = max_peak_increase_time / max_peak_decrease_time

        avg_peak_increase_time = np.mean(peak_indexes - starts) / sampling_frequency
        avg_peak_decrease_time = np.mean(ends - peak_indexes) / sampling_frequency
        # divide by the sampling frequency to get seconds (the original
        # multiplied here, unlike every other duration in this function)
        avg_peak_duration = np.mean(ends - starts) / sampling_frequency
        avg_peak_change_ratio = 0
        if avg_peak_decrease_time != 0:
            avg_peak_change_ratio = avg_peak_increase_time / avg_peak_decrease_time

        dif = np.diff(sig[max_peak_start:peak_indexes[max_peak_idx]])
        # prevent "Mean of empty slice" warning
        if len(dif) == 0:
            max_peak_response_slope_before = np.nan
        else:
            max_peak_response_slope_before = np.mean(dif)

        # if np.isnan(max_peak_response_slope_before):
        #     max_peak_response_slope_before = 0

        dif = np.diff(sig[peak_indexes[max_peak_idx]:max_peak_end])

        # prevent "Mean of empty slice" warning
        if len(dif) == 0:
            max_peak_response_slope_after = np.nan
        else:
            max_peak_response_slope_after = np.mean(dif)

        # if np.isnan(max_peak_response_slope_after):
        #     max_peak_response_slope_after = 0

        signal_overall_change = np.max(sig) - np.min(sig)
        change_duration = np.abs((np.argmax(sig) - np.argmin(sig))) / sampling_frequency
        if signal_overall_change != 0:
            change_rate = change_duration / signal_overall_change
            gsr_peak_features = [max_peak_amplitude_change_before, max_peak_amplitude_change_after,
                                 avg_peak_amplitude_change_before, avg_peak_amplitude_change_after,
                                 avg_peak_change_ratio, max_peak_increase_time, max_peak_decrease_time,
                                 max_peak_duration, max_peak_change_ratio,
                                 avg_peak_increase_time, avg_peak_decrease_time, avg_peak_duration,
                                 max_peak_response_slope_before, max_peak_response_slope_after, signal_overall_change,
                                 change_duration, change_rate]
        else:
            num_features = 17
            gsr_peak_features = np.array([np.nan] * num_features)
    else:
        num_features = 17
        gsr_peak_features = np.array([np.nan] * num_features)
        # print('bad features', gsr_peak_features)

    out = {}
    if checkForFeature('maxPeakAmplitudeChangeBefore', featureNames):
        out['maxPeakAmplitudeChangeBefore'] = gsr_peak_features[0]

    if checkForFeature('maxPeakAmplitudeChangeAfter', featureNames):
        out['maxPeakAmplitudeChangeAfter'] = gsr_peak_features[1]

    if checkForFeature('avgPeakAmplitudeChangeBefore', featureNames):
        out['avgPeakAmplitudeChangeBefore'] = gsr_peak_features[2]

    if checkForFeature('avgPeakAmplitudeChangeAfter', featureNames):
        out['avgPeakAmplitudeChangeAfter'] = gsr_peak_features[3]

    if checkForFeature('avgPeakChangeRatio', featureNames):
        out['avgPeakChangeRatio'] = gsr_peak_features[4]

    if checkForFeature('maxPeakIncreaseTime', featureNames):
        out['maxPeakIncreaseTime'] = gsr_peak_features[5]

    if checkForFeature('maxPeakDecreaseTime', featureNames):
        out['maxPeakDecreaseTime'] = gsr_peak_features[6]

    if checkForFeature('maxPeakDuration', featureNames):
        out['maxPeakDuration'] = gsr_peak_features[7]

    if checkForFeature('maxPeakChangeRatio', featureNames):
        out['maxPeakChangeRatio'] = gsr_peak_features[8]

    if checkForFeature('avgPeakIncreaseTime', featureNames):
        out['avgPeakIncreaseTime'] = gsr_peak_features[9]

    if checkForFeature('avgPeakDecreaseTime', featureNames):
        out['avgPeakDecreaseTime'] = gsr_peak_features[10]

    if checkForFeature('avgPeakDuration', featureNames):
        out['avgPeakDuration'] = gsr_peak_features[11]

    if checkForFeature('maxPeakResponseSlopeBefore', featureNames):
        out['maxPeakResponseSlopeBefore'] = gsr_peak_features[12]

    if checkForFeature('maxPeakResponseSlopeAfter', featureNames):
        out['maxPeakResponseSlopeAfter'] = gsr_peak_features[13]

    if checkForFeature('signalOverallChange', featureNames):
        out['signalOverallChange'] = gsr_peak_features[14]

    if checkForFeature('changeDuration', featureNames):
        out['changeDuration'] = gsr_peak_features[15]

    if checkForFeature('changeRate', featureNames):
        out['changeRate'] = gsr_peak_features[16]

    return out
def mean_filter(s, window_size, window_slide, sampling_rate, plt_flag=True):
    mean_s = []
    start = 0
    end = window_size
    while end <= len(s):
        mean_s.append(np.mean(s[start:end]))
        start = start + window_slide
        end = start + window_size
    if plt_flag:
        plt.plot(s, label='original')
        plt.plot(mean_s, label='mean_filter')
        plt.legend()
        plt.show()

    return np.array(mean_s)
@ -1,131 +0,0 @@
import numpy as np
import pandas as pd


def convertInputInto2d(input, windowLength, overlap=0):
    """ Converts the input into a 2D matrix with row width = windowLength. The last row is padded with zeros to match
    the other rows.

    Overlap has to be smaller than the window length.

    :param input: the one-dimensional array
    :param windowLength: window length, expressed in number of samples
    :param overlap: amount of overlap, expressed in number of samples
    :return: 2D matrix
    """
    if windowLength <= overlap:
        raise Exception("Overlap has to be smaller than window length")

    inputWasList = True
    if type(input) != list:
        inputWasList = False
        input = input.tolist()

    out = [input[i: i + windowLength] for i in range(0, len(input), windowLength - overlap)]
    out[-1].extend([0] * (windowLength - len(out[-1])))
    return out if inputWasList else np.asarray(out)
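# Worked example (values invented): a 7-sample signal, 4-sample windows,
# 2 samples of overlap -> windows start at indices 0, 2, 4, 6 and the last
# one is zero-padded:
#
#   convertInputInto2d(list(range(7)), 4, overlap=2)
#   # -> [[0, 1, 2, 3], [2, 3, 4, 5], [4, 5, 6, 0], [6, 0, 0, 0]]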
def convertInputInto2dTime(input, timeThreshold):
    """ Converts the input array into a 2D matrix by time interval. When timeThreshold is reached in a row,
    the process continues in the next row.

    :param input: pandas dataframe with columns "time" and "data"
    :param timeThreshold: the threshold with which the row width is defined
    :return: 2D matrix
    """
    outData = [[]]
    outTime = [[]]
    startTime = 0

    for index, row in input.iterrows():
        t = row["time"]
        data = row["data"]

        if t - startTime >= timeThreshold:
            startTime = t
            outData.append([])
            outTime.append([])

        outData[-1].append(data)
        outTime[-1].append(t)

    return outData, outTime
def convert1DEmpaticaToArray(pathToEmpaticaCsvFile):
    """ Converts a 1D Empatica file to an array

    :param pathToEmpaticaCsvFile: path to the Empatica csv file
    :return: array of data, starting timestamp of the data, sample rate of the data
    """
    df = pd.read_csv(pathToEmpaticaCsvFile, names=["name"])
    startTimeStamp = df.name[0]
    sampleRate = df.name[1]
    df.drop([0, 1], inplace=True)
    data = df.name.ravel()
    return data, startTimeStamp, sampleRate


def convert3DEmpaticaToArray(pathToEmpaticaCsvFile):
    """ Converts a 3D Empatica file to an array

    :param pathToEmpaticaCsvFile: path to the Empatica csv file
    :return: array of data, starting timestamp of the data, sample rate of the data
    """
    df = pd.read_csv(pathToEmpaticaCsvFile, names=["x", "y", "z"])
    startTimeStamp = df.x[0]
    sampleRate = df.x[1]
    df.drop([0, 1], inplace=True)
    data = np.vstack((df.x.ravel(), df.y.ravel(), df.z.ravel()))
    return data, startTimeStamp, sampleRate
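# Usage sketch for Empatica exports (file names are illustrative; the
# first two CSV rows hold the start timestamp and the sample rate):
#
#   eda, t0, fs = convert1DEmpaticaToArray("EDA.csv")
#   acc, t0, fs = convert3DEmpaticaToArray("ACC.csv")   # acc[0], acc[1], acc[2] = x, y, z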
def checkForFeature(featureName, featureNames):
    """ Returns True if featureName should be calculated: either no filter is given (featureNames is None)
    or featureName is listed in featureNames """
    return featureNames is None or featureName in featureNames
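# Example of the convention used throughout the feature extractors:
#
#   checkForFeature('mean', None)             # -> True (no filter given)
#   checkForFeature('mean', ['mean', 'std'])  # -> True
#   checkForFeature('qd', ['mean', 'std'])    # -> False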
frequencyFeatureNames = ["fqHighestPeakFreqs", "fqHighestPeaks", "fqEnergyFeat", "fqEntropyFeat", "fqHistogramBins",
                         "fqAbsMean", "fqSkewness", "fqKurtosis", "fqInterquart"]

genericFeatureNames = ["autocorrelations", "countAboveMean", "countBelowMean", "maximum", "minimum", "meanAbsChange",
                       "longestStrikeAboveMean", "longestStrikeBelowMean", "stdDev", "median", "meanChange",
                       "numberOfZeroCrossings", "absEnergy", "linearTrendSlope", "ratioBeyondRSigma", "binnedEntropy",
                       "numOfPeaksAutocorr", "numberOfZeroCrossingsAutocorr", "areaAutocorr",
                       "calcMeanCrossingRateAutocorr", "countAboveMeanAutocorr", "sumPer", "sumSquared",
                       "squareSumOfComponent", "sumOfSquareComponents"]

accelerometerFeatureNames = ["meanLow", "areaLow", "totalAbsoluteAreaBand", "totalMagnitudeBand", "entropyBand",
                             "skewnessBand", "kurtosisBand", "postureDistanceLow", "absoluteMeanBand",
                             "absoluteAreaBand", "quartilesBand", "interQuartileRangeBand",
                             "varianceBand", "coefficientOfVariationBand", "amplitudeBand", "totalEnergyBand",
                             "dominantFrequencyEnergyBand", "meanCrossingRateBand", "correlationBand",
                             "quartilesMagnitudesBand", "interQuartileRangeMagnitudesBand",
                             "areaUnderAccelerationMagnitude", "peaksDataLow",
                             "sumPerComponentBand", "velocityBand", "meanKineticEnergyBand",
                             "totalKineticEnergyBand", "squareSumOfComponent", "sumOfSquareComponents",
                             "averageVectorLength", "averageVectorLengthPower", "rollAvgLow", "pitchAvgLow",
                             "rollStdDevLow", "pitchStdDevLow",
                             "rollMotionAmountLow", "rollMotionRegularityLow", "manipulationLow", "rollPeaks",
                             "pitchPeaks", "rollPitchCorrelation"]

gyroscopeFeatureNames = ["meanLow", "areaLow", "totalAbsoluteAreaLow", "totalMagnitudeLow", "entropyLow", "skewnessLow",
                         "kurtosisLow", "quartilesLow", "interQuartileRangeLow", "varianceLow",
                         "coefficientOfVariationLow", "amplitudeLow", "totalEnergyLow", "dominantFrequencyEnergyLow",
                         "meanCrossingRateLow", "correlationLow", "quartilesMagnitudeLow",
                         "interQuartileRangeMagnitudesLow", "areaUnderMagnitude", "peaksCountLow",
                         "averageVectorLengthLow", "averageVectorLengthPowerLow"]

gsrFeatureNames = ['mean', 'std', 'q25', 'q75', 'qd', 'deriv', 'power', 'numPeaks', 'ratePeaks', 'powerPeaks',
                   'sumPosDeriv', 'propPosDeriv', 'derivTonic', 'sigTonicDifference', 'freqFeats',
                   'maxPeakAmplitudeChangeBefore', 'maxPeakAmplitudeChangeAfter',
                   'avgPeakAmplitudeChangeBefore', 'avgPeakAmplitudeChangeAfter', 'avgPeakChangeRatio',
                   'maxPeakIncreaseTime', 'maxPeakDecreaseTime', 'maxPeakDuration', 'maxPeakChangeRatio',
                   'avgPeakIncreaseTime', 'avgPeakDecreaseTime', 'avgPeakDuration', 'maxPeakResponseSlopeBefore',
                   'maxPeakResponseSlopeAfter', 'signalOverallChange', 'changeDuration', 'changeRate',
                   'significantIncrease', 'significantDecrease']

hrvFeatureNames = ['meanHr', 'ibi', 'sdnn', 'sdsd', 'rmssd', 'pnn20', 'pnn50', 'sd', 'sd2', 'sd1/sd2', 'numRR']
@ -1,304 +0,0 @@
import pandas as pd
import numpy as np
import scipy.signal as _signal

from CalculatingFeatures.helper_functions import checkForFeature, hrvFeatureNames
from CalculatingFeatures.calculate_hrv_peaks import peak_detector


def extractHrvFeatures(_sample, ma=False, detrend=False, m_deternd=False, low_pass=False, winsorize=False,
                       winsorize_value=25, hampel_fiter=True, sampling=64, featureNames=None):
    """ Extracts Martin's HRV features

    Warning: the input sample length has to be at least 256!

    :param _sample: array containing the HRV signal
    :param ma: should a moving average filter be applied prior to the calculation
    :param detrend: should overall detrending be applied prior to the calculation
    :param m_deternd: should moving detrending be applied prior to the calculation
    :param low_pass: should a low-pass filter be applied prior to the calculation
    :param winsorize: should the signal be winsorized prior to the calculation
    :param winsorize_value: winsorize percentile value
    :param hampel_fiter: should a Hampel filter be applied to the detected RR intervals
    :param sampling: the sampling frequency of the signal
    :param featureNames: optional list of feature names to calculate; None calculates all features
    :return: HRV features
    """
    hrv_time_features, sample, rr, timings, peak_indx = get_HRV_features(_sample, ma, detrend, m_deternd, low_pass,
                                                                         winsorize, winsorize_value,
                                                                         hampel_fiter, sampling,
                                                                         featureNames=featureNames)
    return hrv_time_features
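# A minimal usage sketch (assumptions: a 64 Hz PPG/BVP window of at least
# 256 samples, as the warning above requires; keyword values are the module
# defaults; the returned keys come from hrvFeatureNames in helper_functions):
#
#   feats = extractHrvFeatures(bvp_window, winsorize=True, sampling=64)
#   feats['meanHr'], feats['rmssd'], ...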
def extractHrvFeatures2D(signal2D, ma=False, detrend=False, m_deternd=False, low_pass=False, winsorize=False,
                         winsorize_value=25, hampel_fiter=True, sampling=64):
    """ Extracts Martin's HRV features

    Warning: the input 2D array width (column count) has to be at least 200!

    :param signal2D: array containing the HRV signal in 2D (each row is processed separately)
    :param ma: should a moving average filter be applied prior to the calculation
    :param detrend: should overall detrending be applied prior to the calculation
    :param m_deternd: should moving detrending be applied prior to the calculation
    :param low_pass: should a low-pass filter be applied prior to the calculation
    :param winsorize: should the signal be winsorized prior to the calculation
    :param winsorize_value: winsorize percentile value
    :param hampel_fiter: should a Hampel filter be applied to the detected RR intervals
    :param sampling: the sampling frequency of the signal
    :return: pandas dataframe of calculated HRV features, each row corresponds with each input row
    """
    outData = pd.DataFrame()

    for sample in signal2D:
        features = extractHrvFeatures(sample, ma, detrend, m_deternd, low_pass,
                                      winsorize, winsorize_value,
                                      hampel_fiter, sampling)
        # DataFrame.append was removed in pandas 2.0; concat is the equivalent
        outData = pd.concat([outData, pd.DataFrame([features])], ignore_index=True)

    return outData


# filter the signal and calculate HRV features in the time and frequency domains
def get_HRV_features(_sample, ma=False, detrend=False, m_deternd=False, low_pass=False, winsorize=True,
                     winsorize_value=25, hampel_fiter=True, sampling=1000, featureNames=None):
    if featureNames is not None and len(set(featureNames).intersection(set(hrvFeatureNames))) == 0:
        return dict(), 0, 0, 0, 0

    sample = _sample.copy()

    if low_pass:  # lowpass filter
        sample = butter_lowpass_filter(sample)
    if m_deternd:  # moving detrending
        sample = moving_detrending(sample, sampling)
    if detrend:  # overall detrending
        sample = _signal.detrend(sample)
    if ma:  # moving average
        sample = moving_average(sample)

    # winsorize the signal
    if winsorize:
        sample = winsorize_signal(sample, winsorize_value)
    # if dynamic_threshold:  # find the median of the min-max normalized signal
    #     thres = dynamic_threshold_value * np.median((sample - sample.min()) / (sample.max() - sample.min()))

    rr, timings, peak_indx = detect_RR(sample, sampling)

    if hampel_fiter:
        rr, outlier_indeces = hampel_filtering(rr)

    timings, rr = medianFilter(timings, rr)

    bad_signal = False

    if len(rr) < len(sample) / (2 * sampling):  # check whether HR is > 30
        # print("Bad signal. Too few RRs detected.")
        bad_signal = True
    elif len(rr) > len(sample) / (sampling / 4):  # check whether HR is < 240
        # print("Bad signal. Too many RRs detected.")
        bad_signal = True
    hrv_time_features = HRV_time(rr, print_flag=False, badSignal=bad_signal, featureNames=featureNames)

    return hrv_time_features, sample, rr, timings, peak_indx
def butter_lowpass(cutoff, fs, order):
    nyq = 0.5 * fs
    normal_cutoff = cutoff / nyq
    b, a = _signal.butter(order, normal_cutoff, btype='low', analog=False)
    return b, a


def butter_lowpass_filter(data, cutoff=5, fs=64, order=3):
    b, a = butter_lowpass(cutoff, fs, order=order)
    y = _signal.lfilter(b, a, data)
    # the first 1000 samples are discarded, presumably to drop the filter transient
    return pd.Series(y[1000:])


# perform detrending using a sliding window
def moving_detrending(sig_input, sampling_rate=64):
    sig = np.copy(sig_input)
    window_size = 1 * sampling_rate
    window_count = 1
    start = (window_count - 1) * window_size
    end = window_count * window_size
    while end <= len(sig):
        if (len(sig) - end) < window_size:
            end = end + window_size
        sig[start:end] = _signal.detrend(sig[start:end])
        window_count = window_count + 1
        start = (window_count - 1) * window_size
        end = window_count * window_size
    return sig


# perform moving average
def moving_average(sample, ma_size=10):
|
||||
sample = pd.Series(sample)
|
||||
sample_ma = sample.rolling(ma_size).mean()
|
||||
sample_ma = sample_ma.iloc[ma_size:].values
|
||||
return sample_ma


def winsorize_signal(sample, winsorize_value):
    # clamp the signal to the [winsorize_value, 100 - winsorize_value] percentile range
    p_min = np.percentile(sample, winsorize_value)
    p_max = np.percentile(sample, 100 - winsorize_value)

    sample[sample > p_max] = p_max
    sample[sample < p_min] = p_min

    return sample


# https://www.mathworks.com/help/signal/ref/hampel.html
# compute the median and standard deviation of a window composed of the
# sample and its six surrounding samples. If a sample differs from the median
# by more than three standard deviations, it is replaced with the median.
# returns the filtered RRs and the outlier indices
def hampel_filtering(sample_rr):
    outlier_indicies = []
    filtered_rr = []
    for i in range(len(sample_rr)):
        start = i - 3
        end = i + 3
        if start < 0:  # for the first 3 samples, use the closest 6 samples
            start = 0
            end = end + 3 - i
        if end > len(sample_rr) - 1:  # for the last 3 samples, use the last 6 samples
            start = len(sample_rr) - 7
            end = len(sample_rr) - 1

        sample_rr_part = sample_rr[start:end]

        # prevent a "Mean of empty slice" warning
        if len(sample_rr_part) == 0:
            sample_med = np.nan
            sample_std = np.nan
        else:
            sample_med = np.median(sample_rr_part)
            sample_std = np.std(sample_rr_part)

        if abs(sample_rr[i] - sample_med) > 3 * sample_std:
            outlier_indicies.append(i)
            filtered_rr.append(sample_med)
        else:
            # keep non-outliers unchanged (without this branch an outlier
            # would be appended twice, once as the median and once as itself)
            filtered_rr.append(sample_rr[i])
    return np.array(filtered_rr), outlier_indicies
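

# --- Illustrative sketch (not part of the original file) -------------------
# A minimal run of the Hampel filter on hypothetical RR intervals (seconds);
# whatever gets flagged, the output must keep one interval per input interval.
def _demo_hampel_filtering():
    rr = np.array([0.8, 0.82, 0.79, 0.81, 1.6, 0.80, 0.83, 0.81, 0.79, 0.82])
    filtered, outliers = hampel_filtering(rr)
    assert len(filtered) == len(rr)  # one output interval per input interval
    return filtered, outliers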


def medianFilter(time, rr):
    # keep only RR intervals within 20 percent of the median RR
    percentageBorder = 0.8
    if len(rr) == 0:
        median = np.nan
    else:
        median = np.median(rr)
    idx = (rr / median >= percentageBorder) & (rr / median <= (2 - percentageBorder))
    f_rr = np.copy(rr)
    f_rr = f_rr[idx]
    # rebuild the timestamps from the surviving intervals
    f_time = timestamps_from_RR(f_rr)
    return f_time, f_rr


def detect_RR(sig, sampling_rate):
    # peak_indx = peakutils.indexes(sig, thres=thres, min_dist=sampling_rate/2.5)
    peak_indx, _ = peak_detector(sig, sampling_rate)
    if len(peak_indx) == 0:
        return [], [], []
    time = np.arange(len(sig))
    tmp = time[peak_indx]
    timings1 = tmp[0:]
    timings = tmp[1:]
    # RR intervals are the differences between consecutive peak times
    RR_intervals = timings - timings1[:len(timings1) - 1]

    return RR_intervals / sampling_rate, timings / sampling_rate, peak_indx
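

# --- Illustrative sketch (not part of the original file) -------------------
# The RR computation above is equivalent to np.diff over the peak times; a
# minimal check on hand-picked, hypothetical peak indices at 1000 Hz.
def _demo_detect_RR_arithmetic():
    peak_indx = np.array([100, 1100, 2050, 3100])  # hypothetical peak samples
    rr = np.diff(peak_indx) / 1000.0               # -> [1.0, 0.95, 1.05] seconds
    return rr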


# extract HRV features in the time domain
def HRV_time(RR_intervals, print_flag, badSignal=False, featureNames=None):
    if not badSignal:
        ibi = np.mean(RR_intervals)  # mean of the RR intervals = mean inter-beat interval
        mean_hr = 60 / ibi
        sdnn = np.std(RR_intervals)  # standard deviation of all RR intervals
        # differences between successive/neighbouring RR intervals (i.e., filter noise)
        RR_diff = []
        RR_sqdiff = []
        for i in range(len(RR_intervals) - 1):
            RR_diff.append(np.absolute(RR_intervals[i + 1] - RR_intervals[i]))
            RR_sqdiff.append(np.power(np.absolute(RR_intervals[i + 1] - RR_intervals[i]), 2))
        RR_diff = np.array(RR_diff)
        RR_sqdiff = np.array(RR_sqdiff)
        sdsd = np.std(RR_diff)  # standard deviation of the successive RR differences
        rmssd = np.sqrt(np.mean(RR_sqdiff))  # root mean square of the successive RR differences
        nn20 = [x for x in RR_diff if (x > 0.02)]  # successive differences over 20 ms
        nn50 = [x for x in RR_diff if (x > 0.05)]  # successive differences over 50 ms
        # proportion of NN20 / NN50 intervals among all intervals
        pnn20 = 100 * float(len(nn20)) / float(len(RR_diff)) if len(RR_diff) > 0 else np.nan
        pnn50 = 100 * float(len(nn50)) / float(len(RR_diff)) if len(RR_diff) > 0 else np.nan
        sd1 = np.sqrt(0.5 * sdnn * sdnn)
        sd2 = np.nan
        tmp = 2.0 * sdsd * sdsd - 0.5 * sdnn * sdnn
        if tmp > 0:  # avoid taking the square root of a negative value
            sd2 = np.sqrt(tmp)

        if print_flag:
            print("meanHR:", mean_hr)
            print("IBI:", ibi)
            print("SDNN:", sdnn)
            print("SDSD:", sdsd)
            print("RMSSD:", rmssd)
            print("pNN20:", pnn20)
            print("pNN50:", pnn50)
            print("sd1:", sd1)
            print("sd2:", sd2)
            print("sd1/sd2:", sd1 / sd2)

    out = {}
    if checkForFeature("meanHr", featureNames):
        out['meanHr'] = mean_hr if not badSignal else np.nan

    if checkForFeature("ibi", featureNames):
        out['ibi'] = ibi if not badSignal else np.nan

    if checkForFeature("sdnn", featureNames):
        out['sdnn'] = sdnn if not badSignal else np.nan

    if checkForFeature("sdsd", featureNames):
        out['sdsd'] = sdsd if not badSignal else np.nan

    if checkForFeature("rmssd", featureNames):
        out['rmssd'] = rmssd if not badSignal else np.nan

    if checkForFeature("pnn20", featureNames):
        out['pnn20'] = pnn20 if not badSignal else np.nan

    if checkForFeature("pnn50", featureNames):
        out['pnn50'] = pnn50 if not badSignal else np.nan

    if checkForFeature("sd", featureNames):
        out['sd'] = sd1 if not badSignal else np.nan

    if checkForFeature("sd2", featureNames):
        out['sd2'] = sd2 if not badSignal else np.nan

    if checkForFeature("sd1/sd2", featureNames):
        out['sd1/sd2'] = sd1 / sd2 if not badSignal else np.nan

    if checkForFeature("numRR", featureNames):
        out['numRR'] = len(RR_intervals)

    return out
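

# --- Illustrative sketch (not part of the original file) -------------------
# A minimal run of the time-domain extractor on synthetic RR intervals
# (seconds), requesting two features by name; the mean RR of ~0.804 s should
# yield a meanHr of roughly 74.6 BPM, assuming checkForFeature keeps only the
# requested names.
def _demo_HRV_time():
    rr = np.array([0.80, 0.82, 0.78, 0.81, 0.79, 0.83, 0.80])
    return HRV_time(rr, print_flag=False, featureNames=["meanHr", "rmssd"])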


def timestamps_from_RR(rr_intervals):
    # the cumulative sum of the RR intervals gives the timestamp of each beat
    time = []
    current_time = 0.0
    for rr in rr_intervals:
        current_time = current_time + rr
        time.append(current_time)
    return np.array(time)
@@ -1,207 +0,0 @@
# Calculating features

## Usage
- Install the library with:
```
pip install pep517
python -m pep517.build .
```
  or, alternatively:
```
pip install build
python -m build
```
- Basic usage is:
```
from calculatingfeatures.CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, frequencyFeatureNames, hrvFeatureNames
from calculatingfeatures.CalculatingFeatures.calculate_features import calculateFeatures
import pandas as pd

pathToHrvCsv = "example_data/S2_E4_Data/BVP.csv"
windowLength = 500

# get an array of values from the HRV Empatica file
hrv_data, startTimeStamp, sampleRate = convert1DEmpaticaToArray(pathToHrvCsv)

# convert the HRV data into a 2D array
hrv_data_2D = convertInputInto2d(hrv_data, windowLength)

# create a list of feature names
featureNames = []
featureNames.extend(hrvFeatureNames)
featureNames.extend(frequencyFeatureNames)

pd.set_option('display.max_columns', None)

# calculate features
calculatedFeatures = calculateFeatures(hrv_data_2D, fs=int(sampleRate), featureNames=featureNames)
```
- More usage examples are located in the **usage_examples.ipynb** file

## Features
- Features are returned from the `calculateFeatures()` function as a pandas DataFrame.
- If a feature could not be calculated (for example, because the input signal is invalid), NaN is returned for it; a short sketch of handling these NaNs follows this list.
- The rest of this section lists all available features with their descriptions.
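
A hedged sketch (not from the original README) of inspecting the output, assuming the `calculatedFeatures` DataFrame from the usage example above and the HRV column `meanHr`:
```
# windows whose HRV features could not be computed carry NaN in those columns
valid = calculatedFeatures.dropna(subset=["meanHr"])
print(len(valid), "of", len(calculatedFeatures), "windows produced valid HRV features")
```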

### GSR features:
These features are useful for 1D GSR (EDA) signals.
- `mean`: mean of the signal
- `std`: standard deviation of the signal
- `q25`: 0.25 quantile
- `q75`: 0.75 quantile
- `qd`: q75 - q25
- `deriv`: sum of gradients of the signal
- `power`: power of the signal (mean of the squared signal)
- `numPeaks`: number of EDA peaks
- `ratePeaks`: average number of peaks per second
- `powerPeaks`: power of peaks (mean of the signal at the peak indexes)
- `sumPosDeriv`: sum of positive derivatives divided by the number of all derivatives
- `propPosDeriv`: proportion of positive derivatives among all derivatives
- `derivTonic`: sum of gradients of the tonic component
- `sigTonicDifference`: mean of the tonic component subtracted from the signal
- `freqFeats`:
- `maxPeakAmplitudeChangeBefore`: maximum peak amplitude change before the peak
- `maxPeakAmplitudeChangeAfter`: maximum peak amplitude change after the peak
- `avgPeakAmplitudeChangeBefore`: average peak amplitude change before the peak
- `avgPeakAmplitudeChangeAfter`: average peak amplitude change after the peak
- `avgPeakChangeRatio`: avg_peak_increase_time / avg_peak_decrease_time
- `maxPeakIncreaseTime`: maximum peak increase time
- `maxPeakDecreaseTime`: maximum peak decrease time
- `maxPeakDuration`: maximum peak duration
- `maxPeakChangeRatio`: max_peak_increase_time / max_peak_decrease_time
- `avgPeakIncreaseTime`: average peak increase time
- `avgPeakDecreaseTime`: average peak decrease time
- `avgPeakDuration`: average peak duration
- `maxPeakResponseSlopeBefore`: maximum peak response slope before the peak
- `maxPeakResponseSlopeAfter`: maximum peak response slope after the peak
- `signalOverallChange`: maximum difference between samples (max(sig) - min(sig))
- `changeDuration`: duration between the maximum and minimum values
- `changeRate`: change_duration / signal_overall_change
- `significantIncrease`:
- `significantDecrease`:

### HRV features:
These features are useful for 1D HRV (BVP) signals.

If the number of RR intervals (`numRR`) is less than `length of sample / (2 * sampling rate)` (i.e., a heart rate below 30 BPM) or greater than `length of sample / (sampling rate / 4)` (i.e., above 240 BPM), the BPM value is implausible, so all other HRV features are set to NaN. See the worked example after this paragraph.
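
A hedged, worked example of these bounds (values assumed from the usage example above: a 64 Hz signal and 500-sample windows):
```
sampling_rate = 64                      # Hz (assumed)
window = 500                            # samples, i.e. ~7.8 s
min_rr = window / (2 * sampling_rate)   # ~3.9 intervals; fewer means HR < 30 BPM
max_rr = window / (sampling_rate / 4)   # 31.25 intervals; more means HR > 240 BPM
print(min_rr, max_rr)                   # 3.90625 31.25
```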

- `meanHr`: mean heart rate
- `ibi`: mean inter-beat interval
- `sdnn`: standard deviation of the IBI
- `sdsd`: standard deviation of the differences between all subsequent R-R intervals
- `rmssd`: root mean square of the successive R-R interval differences
- `pnn20`: the proportion of NN20 intervals among all intervals
- `pnn50`: the proportion of NN50 intervals among all intervals
- `sd`:
- `sd2`:
- `sd1/sd2`: sd / sd2 ratio
- `numRR`: number of RR intervals

### Accelerometer features:
These features are useful for 3D accelerometer signals.
- `meanLow`: mean of the low-pass filtered signal
- `areaLow`: area under the low-pass filtered signal
- `totalAbsoluteAreaBand`: sum of the absolute areas under the band-pass filtered x, y and z signals
- `totalMagnitudeBand`: square root of the sum of the squared band-pass filtered x, y and z components
- `entropyBand`: entropy of the band-pass filtered signal
- `skewnessBand`: skewness of the band-pass filtered signal
- `kurtosisBand`: kurtosis of the band-pass filtered signal
- `postureDistanceLow`: difference between the mean values for a given sensor (low-pass filtered)
- `absoluteMeanBand`: mean of the band-pass filtered signal
- `absoluteAreaBand`: area under the band-pass filtered signal
- `quartilesBand`: quartiles of the band-pass filtered signal
- `interQuartileRangeBand`: interquartile range of the band-pass filtered signal
- `varianceBand`: variance of the band-pass filtered signal
- `coefficientOfVariationBand`: dispersion of the band-pass filtered signal
- `amplitudeBand`: difference between the maximum and minimum samples of the band-pass filtered signal
- `totalEnergyBand`: total magnitude of the band-pass filtered signal
- `dominantFrequencyEnergyBand`: ratio of energy in the dominant frequency
- `meanCrossingRateBand`: number of crossings of the band-pass filtered signal with its mean
- `correlationBand`: Pearson correlation between the band-pass filtered axes
- `quartilesMagnitudesBand`: quartiles at 25%, 50% and 75% of the band-pass filtered signal
- `interQuartileRangeMagnitudesBand`: interquartile range of the band-pass filtered signal
- `areaUnderAccelerationMagnitude`: area under the acceleration magnitude
- `peaksDataLow`: number of peaks, sum of peak values, peak average, amplitude average
- `sumPerComponentBand`: sum per component of the band-pass filtered signal
- `velocityBand`: velocity of the band-pass filtered signal
- `meanKineticEnergyBand`: mean kinetic energy 1/2*m*v^2 of the band-pass filtered signal
- `totalKineticEnergyBand`: total kinetic energy 1/2*m*v^2 over all axes (band-pass filtered)
- `squareSumOfComponent`: squared sum of the component
- `sumOfSquareComponents`: sum of the squared components
- `averageVectorLength`: mean of the magnitude vector
- `averageVectorLengthPower`: squared mean of the magnitude vector
- `rollAvgLow`: maximum difference of the low-pass filtered roll samples
- `pitchAvgLow`: maximum difference of the low-pass filtered pitch samples
- `rollStdDevLow`: standard deviation of the roll (calculated from the low-pass filtered signal)
- `pitchStdDevLow`: standard deviation of the pitch (calculated from the low-pass filtered signal)
- `rollMotionAmountLow`: amount of wrist roll motion (from the low-pass filtered signal)
- `rollMotionRegularityLow`: regularity of wrist roll motion
- `manipulationLow`: manipulation of the low-pass filtered signals
- `rollPeaks`: number of roll peaks, sum of roll peak values, roll peak average, roll amplitude average
- `pitchPeaks`: number of pitch peaks, sum of pitch peak values, pitch peak average, pitch amplitude average
- `rollPitchCorrelation`: correlation between roll and pitch (obtained from the low-pass filtered signal)

### Gyroscope features:
These features are useful for 3D gyroscope signals.
- `meanLow`: mean of the low-pass filtered signal
- `areaLow`: area under the low-pass filtered signal
- `totalAbsoluteAreaLow`: sum of the absolute areas under the low-pass filtered x, y and z signals
- `totalMagnitudeLow`: square root of the sum of the squared low-pass filtered x, y and z components
- `entropyLow`: entropy of the low-pass filtered signal
- `skewnessLow`: skewness of the low-pass filtered signal
- `kurtosisLow`: kurtosis of the low-pass filtered signal
- `quartilesLow`: quartiles of the low-pass filtered signal
- `interQuartileRangeLow`: interquartile range of the low-pass filtered signal
- `varianceLow`: variance of the low-pass filtered signal
- `coefficientOfVariationLow`: dispersion of the low-pass filtered signal
- `amplitudeLow`: difference between the maximum and minimum samples of the low-pass filtered signal
- `totalEnergyLow`: total magnitude of the low-pass filtered signal
- `dominantFrequencyEnergyLow`: ratio of energy in the dominant frequency
- `meanCrossingRateLow`: number of crossings of the low-pass filtered signal with its mean
- `correlationLow`: Pearson correlation between the low-pass filtered axes
- `quartilesMagnitudeLow`: quartiles at 25%, 50% and 75% of the low-pass filtered signal
- `interQuartileRangeMagnitudesLow`: interquartile range of the low-pass filtered signal
- `areaUnderMagnitude`: area under the magnitude
- `peaksCountLow`: number of peaks in the low-pass filtered signal
- `averageVectorLengthLow`: mean of the low-pass filtered magnitude vector
- `averageVectorLengthPowerLow`: squared mean of the low-pass filtered magnitude vector


### Generic features:
These are generic features, useful for many different types of signals.
- `autocorrelations`: autocorrelations of the given signal with lags 5, 10, 20, 30, 50, 75 and 100
- `countAboveMean`: number of values in the signal that are higher than the signal's mean
- `countBelowMean`: number of values in the signal that are lower than the signal's mean
- `maximum`: maximum value of the signal
- `minimum`: minimum value of the signal
- `meanAbsChange`: mean of the absolute differences between subsequent time-series values
- `longestStrikeAboveMean`: longest run of the signal above its mean
- `longestStrikeBelowMean`: longest run of the signal below its mean
- `stdDev`: standard deviation of the signal
- `median`: median of the signal
- `meanChange`: mean of the differences between subsequent time-series values
- `numberOfZeroCrossings`: number of crossings of the signal through 0
- `absEnergy`: absolute energy of the time series, i.e. the sum of the squared values
- `linearTrendSlope`: slope of a linear least-squares regression of the time-series values against the sequence from 0 to (length - 1)
- `ratioBeyondRSigma`: ratio of values that are more than r*std(x) (r sigma) away from the signal's mean; here r = 2.5
- `binnedEntropy`: entropy of the binned values
- `numOfPeaksAutocorr`: number of peaks of the autocorrelations
- `numberOfZeroCrossingsAutocorr`: number of crossings of the autocorrelations through 0
- `areaAutocorr`: area under the autocorrelations
- `calcMeanCrossingRateAutocorr`: number of crossings of the autocorrelations with their mean
- `countAboveMeanAutocorr`: number of values in the signal that are higher than the autocorrelation's mean
- `sumPer`: sum per component
- `sumSquared`: squared sum per component
- `squareSumOfComponent`: squared sum of the component
- `sumOfSquareComponents`: sum of the squared components

### Frequency features:
These are frequency-domain features, useful for many different types of signals. The signal is converted to a power spectral density (PSD) estimate, and the features are calculated on that estimate; a hedged sketch of this conversion follows the list.
- `fqHighestPeakFreqs`: the three frequencies corresponding to the largest peaks
- `fqHighestPeaks`: the three largest peaks
- `fqEnergyFeat`: energy, calculated as the normalized sum of the squared FFT component magnitudes
- `fqEntropyFeat`: entropy of the FFT of the signal
- `fqHistogramBins`: binned distribution (histogram)
- `fqAbsMean`: absolute mean of the raw signal
- `fqSkewness`: skewness of the power spectrum of the data
- `fqKurtosis`: kurtosis of the power spectrum of the data
- `fqInterquart`: interquartile range of the raw signal
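
A minimal sketch of the PSD conversion, assuming SciPy's Welch estimator stands in for whatever PSD routine the library uses internally:
```
import numpy as np
from scipy.signal import welch

fs = 64                               # assumed sampling rate
sig = np.random.randn(fs * 30)        # 30 s of a placeholder signal
freqs, psd = welch(sig, fs=fs, nperseg=256)
# e.g., the three largest PSD peaks and their frequencies
top3 = np.argsort(psd)[-3:][::-1]
print(freqs[top3], psd[top3])
```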
@@ -1,32 +0,0 @@
import sys
sys.path.append("..")
from CalculatingFeatures.helper_functions import convertInputInto2d, accelerometerFeatureNames, frequencyFeatureNames
from CalculatingFeatures.helper_functions import convert3DEmpaticaToArray
from CalculatingFeatures.calculate_features import calculateFeatures

import pandas as pd

pathToAccCsv = "../example_data/S2_E4_Data_shortened/ACC.csv"
windowLength = 500

# np.seterr(all='raise')

# get an array of values from the ACC Empatica file
acc_data, startTimeStamp, sampleRate = convert3DEmpaticaToArray(pathToAccCsv)
acc_data = acc_data[:, :int(300000 // sampleRate)]

# convert the ACC data into 2D arrays, one per axis
x_2D = convertInputInto2d(acc_data[0], windowLength)
y_2D = convertInputInto2d(acc_data[1], windowLength)
z_2D = convertInputInto2d(acc_data[2], windowLength)

# create a list of feature names
featureNames = []
featureNames.extend(accelerometerFeatureNames)
featureNames.extend(frequencyFeatureNames)

pd.set_option('display.max_columns', None)

# calculate features
calculatedFeatures = calculateFeatures(x_2D, y_2D, z_2D, fs=int(sampleRate), featureNames=featureNames)
print(calculatedFeatures)
@@ -1,67 +0,0 @@
import sys
sys.path.append("..")
from CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, gsrFeatureNames
from CalculatingFeatures.calculate_features import calculateFeatures
from CalculatingFeatures import gsr
from eda_explorer.load_files import butter_lowpass_filter
from eda_explorer.EDA_Peak_Detection_Script import calcPeakFeatures
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

pd.set_option('display.max_rows', None)
pd.set_option('display.max_columns', None)

pathToEDACsv = "../example_data/S2_E4_Data/EDA.csv"

# get an array of values from the EDA Empatica file
eda_data, startTimeStamp_EDA, sampleRate_EDA = convert1DEmpaticaToArray(pathToEDACsv)
eda_data = eda_data[:int(300000 // sampleRate_EDA)]

windowLength_EDA = int(sampleRate_EDA * 120)

# convert the EDA data into a 2D array
eda_data_2D = convertInputInto2d(eda_data, windowLength_EDA)

calculatedFeatures_EDA = calculateFeatures(eda_data_2D, fs=int(sampleRate_EDA), featureNames=gsrFeatureNames)
print(calculatedFeatures_EDA)

# The exploration below is kept commented out: it computes GSR features row by
# row, low-pass filters one window, runs eda_explorer peak detection on it and
# plots the detected peaks.

# df_EDA = pd.DataFrame()
# for row in eda_data_2D:
#     current_result = {}
#     current_result.update(gsr.extractGsrFeatures(row, sampleRate=int(sampleRate_EDA), featureNames=gsrFeatureNames))
#     df_EDA = df_EDA.append(current_result, ignore_index=True)

# no_interest = 131
# current_row = eda_data_2D[no_interest,]
# filtered_EDA = butter_lowpass_filter(current_row, 1.0, int(sampleRate_EDA), 6)

# plt.figure()
# plt.plot(current_row, color='blue')
# plt.plot(filtered_EDA, color='red')
# plt.savefig('output_images/EDA_exa1.png')

# gsr_data = pd.DataFrame(current_row, columns=["EDA"])
# startTime = pd.to_datetime(0, unit="s")
# gsr_data.index = pd.date_range(start=startTime, periods=len(gsr_data), freq=str(1000 / sampleRate_EDA) + 'L')
# gsr_data['filtered_eda'] = filtered_EDA
# peakData = calcPeakFeatures(gsr_data, offset=1, thresh=.02, start_WT=4, end_WT=4, sampleRate=int(sampleRate_EDA))

# peaks = np.where(peakData.peaks == 1.0)[0]
# peak_starts = np.where(peakData.peak_start == 1.0)[0]
# peak_ends = np.where(peakData.peak_end == 1.0)[0]
# print(peaks)
# print(peak_starts)
# print(peak_ends)

# plt.figure()
# plt.plot(filtered_EDA, color='red')
# plt.scatter(peaks, filtered_EDA[peaks], color="green")
# plt.scatter(peak_starts, filtered_EDA[peak_starts], color="green", marker=">", alpha=0.5)
# plt.scatter(peak_ends, filtered_EDA[peak_ends], color="green", marker="s", alpha=0.5)
# plt.savefig('output_images/EDA_exa2.png')

# print(df_EDA.loc[no_interest, df_EDA.columns.str.contains('Peak')])
@@ -1,28 +0,0 @@
import sys
sys.path.append("..")
from CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, frequencyFeatureNames, hrvFeatureNames
from CalculatingFeatures.calculate_features import calculateFeatures
import pandas as pd

pathToHrvCsv = "../example_data/S2_E4_Data/BVP.csv"
windowLength = 500

# get an array of values from the HRV Empatica file
hrv_data, startTimeStamp, sampleRate = convert1DEmpaticaToArray(pathToHrvCsv)
hrv_data = hrv_data[:int(300000 // sampleRate)]

# convert the HRV data into a 2D array
hrv_data_2D = convertInputInto2d(hrv_data, windowLength)

# create a list of feature names
featureNames = []
featureNames.extend(hrvFeatureNames)
featureNames.extend(frequencyFeatureNames)

pd.set_option('display.max_columns', None)

# calculate features
calculatedFeatures = calculateFeatures(hrv_data_2D, fs=int(sampleRate), featureNames=featureNames)
print(calculatedFeatures)
@@ -1 +0,0 @@
# Temperature features do not exist - should they be used from the RAPIDS defaults?
@@ -1,175 +0,0 @@
,EDA,rise_time,max_deriv,amp,decay_time,SCR_width,AUC
|
||||
2017-05-22 07:15:34.750,0.503563,2.375,0.014828457554093788,0.023063999999999973,3.375,5.125,0.11820299999999986
|
||||
2017-05-22 07:16:18.500,0.669122,1.25,0.11153807317588882,0.03459599999999996,0.375,1.5,0.05189399999999994
|
||||
2017-05-22 07:16:29.250,0.808787,1.625,0.15124313794040223,0.057659999999999934,0.375,1.75,0.10090499999999988
|
||||
2017-05-22 07:16:43.250,1.3176430000000001,2.125,0.3137904553094124,0.1410450000000001,0.5,2.375,0.3349818750000002
|
||||
2017-05-22 07:16:49.625,1.436807,1.5,0.2754308494258524,0.09930299999999992,0.125,1.5,0.14895449999999988
|
||||
2017-05-22 07:17:09.250,1.425176,1.0,0.27652972963920597,0.07688000000000006,0.125,1.0,0.07688000000000006
|
||||
2017-05-22 07:17:20.250,1.732795,2.0,0.3374972909832241,0.32299500000000014,1.125,2.125,0.6863643750000004
|
||||
2017-05-22 07:17:31.625,1.1331315,1.375,0.22530763506209084,0.06406650000000003,0.5,1.75,0.11211637500000005
|
||||
2017-05-22 07:17:38.500,1.403394,1.5,0.34871084578724343,0.09225600000000012,0.625,2.0,0.18451200000000023
|
||||
2017-05-22 07:17:45.125,1.4354269999999998,2.0,0.17189871106182686,0.15632299999999977,2.875,4.375,0.683913124999999
|
||||
2017-05-22 07:18:07.125,1.655274,2.125,0.26828106990271827,0.24153099999999994,1.5,3.25,0.7849757499999999
|
||||
2017-05-22 07:18:15.000,1.59313,1.625,0.15518636240276962,0.10378849999999984,1.25,2.5,0.2594712499999996
|
||||
2017-05-22 07:18:28.375,1.5700655,1.75,0.061967970875677736,0.04100250000000005,1.0,2.375,0.09738093750000013
|
||||
2017-05-22 07:18:34.500,1.92115,2.0,0.3612223590696164,0.36646,2.625,4.0,1.46584
|
||||
2017-05-22 07:18:40.375,1.8026270000000002,1.625,0.10289846560438853,0.07623900000000017,1.0,2.25,0.17153775000000038
|
||||
2017-05-22 07:18:48.375,1.8154405000000002,1.875,0.1396967735464596,0.11852350000000045,1.25,2.75,0.32593962500000123
|
||||
2017-05-22 07:19:09.000,1.663603,1.75,0.05329105073014517,0.039722000000000035,1.25,2.625,0.10427025000000009
|
||||
2017-05-22 07:19:20.375,1.6591179999999999,1.75,0.08783426805671546,0.06598799999999994,1.25,2.625,0.17321849999999983
|
||||
2017-05-22 07:19:26.125,1.6450235000000002,2.25,0.044012205524090575,0.043565000000000076,2.125,3.625,0.15792312500000028
|
||||
2017-05-22 07:21:03.625,1.171473,4.0,0.11766914755295232,0.22102999999999995,,,
|
||||
2017-05-22 07:21:11.375,1.1048434999999999,1.875,0.026811067104887343,0.02050149999999995,0.625,2.0,0.0410029999999999
|
||||
2017-05-22 07:21:27.625,1.031167,2.125,0.07750606429624973,0.07752100000000006,2.0,3.625,0.2810136250000002
|
||||
2017-05-22 07:21:48.375,1.0177125,2.75,0.061221624333410496,0.100584,3.0,5.0,0.50292
|
||||
2017-05-22 07:21:58.625,1.1054840000000001,3.75,0.09361829423393608,0.15440050000000016,2.0,4.5,0.6948022500000007
|
||||
2017-05-22 07:23:26.375,0.6320325,1.75,0.047011122388191495,0.03844000000000003,1.125,2.375,0.09129500000000007
|
||||
2017-05-22 07:24:26.000,0.534582,2.0,0.0323970421729749,0.025626999999999955,1.875,3.5,0.08969449999999984
|
||||
2017-05-22 07:25:42.500,0.724219,4.0,0.11106882694184428,0.29086199999999995,,,
|
||||
2017-05-22 07:26:03.125,1.3380455,3.125,0.2512871921254973,0.34852150000000004,2.125,3.875,1.3505208125000001
|
||||
2017-05-22 07:26:14.250,1.34189,2.25,0.2918536338935702,0.254985,3.125,4.875,1.2430518750000001
|
||||
2017-05-22 07:26:20.500,1.321388,1.5,0.1539927116847526,0.09481799999999985,0.75,2.0,0.1896359999999997
|
||||
2017-05-22 07:26:29.250,1.425176,2.125,0.25289703835694866,0.21270100000000003,2.375,4.0,0.8508040000000001
|
||||
2017-05-22 07:26:35.750,1.3944239999999999,1.875,0.12879051845700573,0.09674000000000005,1.125,2.625,0.2539425000000001
|
||||
2017-05-22 07:26:41.000,1.480372,1.875,0.20847826783929513,0.15898349999999994,1.125,2.375,0.3775858124999999
|
||||
2017-05-22 07:26:57.250,1.336863,1.875,0.13441189501376805,0.112757,1.625,3.125,0.352365625
|
||||
2017-05-22 07:27:31.375,1.1786189999999999,2.25,0.08202871886284768,0.08328649999999982,2.625,4.25,0.35396762499999923
|
||||
2017-05-22 07:27:55.375,1.029344,2.125,0.05728986633907596,0.052535,1.5,3.125,0.164171875
|
||||
2017-05-22 07:28:11.125,1.3579065000000001,2.625,0.24722820810617208,0.3466005000000001,,,
|
||||
2017-05-22 07:28:17.750,1.860928,4.0,0.31268337765616927,0.47290999999999994,2.375,4.375,2.0689812499999998
|
||||
2017-05-22 07:28:38.375,1.7680315000000002,2.25,0.29119838550697885,0.32353650000000034,3.125,4.875,1.5772404375000018
|
||||
2017-05-22 07:29:04.375,1.5643,2.125,0.1404294931600223,0.13390000000000013,,,
|
||||
2017-05-22 07:29:26.375,1.7776414999999999,2.125,0.2393192190774318,0.23448349999999984,2.0,3.625,0.8500026874999994
|
||||
2017-05-22 07:30:05.125,1.399008,1.5,0.05270208243056018,0.03203350000000005,0.75,1.875,0.06006281250000009
|
||||
2017-05-22 07:30:31.375,1.653352,3.375,0.2871077101193489,0.429246,,,
|
||||
2017-05-22 07:30:34.625,1.8404265,1.875,0.23727048901370473,0.18835550000000012,0.875,2.375,0.4473443125000003
|
||||
2017-05-22 07:30:47.375,1.7411235,2.0,0.14199373618878397,0.11980500000000016,2.0,3.625,0.4342931250000006
|
||||
2017-05-22 07:30:53.000,1.731513,1.75,0.06045060566822791,0.030752000000000113,0.625,2.0,0.061504000000000225
|
||||
2017-05-22 07:31:15.500,1.525219,2.375,0.03471297956604147,0.03395550000000003,0.875,2.625,0.08913318750000007
|
||||
2017-05-22 07:32:17.875,2.08536,2.375,0.45106857195387384,0.502481,,,
|
||||
2017-05-22 07:32:23.000,2.114831,1.375,0.15153710180561575,0.05445650000000013,0.375,1.625,0.08849181250000021
|
||||
2017-05-22 07:32:40.750,2.410332,1.875,0.7084231020390668,0.5767534999999997,3.0,4.5,2.5953907499999986
|
||||
2017-05-22 07:33:12.375,1.870096,1.0,0.12952756935894172,0.035236000000000045,0.5,1.5,0.05285400000000007
|
||||
2017-05-22 07:33:31.500,1.647145,1.75,0.28035533686811753,0.1255710000000001,0.375,1.375,0.17266012500000014
|
||||
2017-05-22 07:33:48.875,1.6398979999999999,2.5,0.22399495229998578,0.29086199999999995,3.5,5.375,1.5633832499999998
|
||||
2017-05-22 07:33:58.875,1.6597585000000001,2.0,0.23193033054720047,0.23832700000000018,1.75,3.25,0.7745627500000005
|
||||
2017-05-22 07:34:20.125,1.5976145000000002,3.0,0.19560839697922994,0.26843950000000016,,,
|
||||
2017-05-22 07:34:49.875,1.3932419999999999,2.375,0.028080802357340673,0.032032999999999756,0.75,2.375,0.07607837499999942
|
||||
2017-05-22 07:35:19.750,1.143382,2.75,0.032085826213871016,0.03075199999999989,1.0,2.75,0.0845679999999997
|
||||
2017-05-22 07:35:30.500,1.186948,1.75,0.20425108909752332,0.12557099999999988,0.25,1.75,0.21974924999999979
|
||||
2017-05-22 07:35:53.125,1.251014,1.625,0.11746281156048255,0.0320330000000002,0.5,1.875,0.060061875000000375
|
||||
2017-05-22 07:36:05.625,1.306752,2.0,0.10508940062988081,0.09866249999999988,0.625,2.25,0.22199062499999972
|
||||
2017-05-22 07:36:26.625,1.3002465,2.5,0.21642722813322202,0.22103000000000006,0.375,2.125,0.4696887500000001
|
||||
2017-05-22 07:36:42.750,1.176598,1.75,0.14901857025425969,0.11147600000000013,1.5,2.875,0.3204935000000004
|
||||
2017-05-22 07:36:57.375,1.0888265,1.875,0.09062315859340409,0.0762385000000001,1.375,2.875,0.21918568750000028
|
||||
2017-05-22 07:37:19.375,1.0817795000000001,1.75,0.03859067965386842,0.026908500000000224,,,
|
||||
2017-05-22 07:37:22.250,1.224007,2.0,0.14184516802949076,0.12813300000000005,1.625,3.125,0.40041562500000016
|
||||
2017-05-22 07:37:31.500,1.158659,2.0,0.05539320099511258,0.04869000000000012,1.625,3.25,0.1582425000000004
|
||||
2017-05-22 07:37:53.500,1.4290200000000002,2.25,0.24063567651439755,0.24985900000000005,,,
|
||||
2017-05-22 07:38:02.250,1.375204,1.75,0.0511188487860057,0.03972100000000012,0.375,1.75,0.0695117500000002
|
||||
2017-05-22 07:38:13.375,1.298965,2.0,0.048745168416420626,0.049331000000000014,2.375,3.75,0.18499125000000005
|
||||
2017-05-22 07:38:22.500,1.288074,2.75,0.019866865669309064,0.028189999999999937,1.625,3.625,0.10218874999999977
|
||||
2017-05-22 07:38:42.500,1.250915,2.0,0.04330280907170092,0.03459599999999985,1.75,3.375,0.11676149999999949
|
||||
2017-05-22 07:38:57.875,1.2336174999999998,1.375,0.13929210080343957,0.07367649999999992,0.375,1.5,0.11051474999999988
|
||||
2017-05-22 07:39:24.875,1.2291325,2.625,0.07696330743284285,0.07047349999999986,2.625,4.5,0.31713074999999935
|
||||
2017-05-22 07:39:58.000,1.386736,2.25,0.1950892271550373,0.22807699999999986,,,
|
||||
2017-05-22 07:40:07.750,1.431682,3.625,0.10232995613186091,0.1551404999999999,1.5,4.125,0.6399545624999996
|
||||
2017-05-22 07:40:13.875,1.336223,1.5,0.03691823367415381,0.021782999999999886,0.625,1.875,0.040843124999999786
|
||||
2017-05-22 07:40:31.750,1.7097310000000001,2.125,0.29895999294932096,0.299191,2.125,3.75,1.1219662499999998
|
||||
2017-05-22 07:41:02.125,1.4476985,3.5,0.12358274335508845,0.026908000000000154,0.125,3.125,0.08408750000000048
|
||||
2017-05-22 07:41:30.250,1.3060120000000002,4.0,0.018481394048414757,0.044846000000000164,,,
|
||||
2017-05-22 07:41:48.500,1.311138,3.125,0.026024648999197098,0.02050149999999973,0.75,2.5,0.05125374999999932
|
||||
2017-05-22 07:42:05.875,1.2926575,2.625,0.10473285642778052,0.1274925,1.0,2.625,0.3346678125
|
||||
2017-05-22 07:42:18.250,1.158659,2.375,0.2680741258669528,0.20757550000000013,,,
|
||||
2017-05-22 07:44:54.875,1.07281,2.25,0.2296469005371069,0.14222750000000006,1.0,2.5,0.35556875000000016
|
||||
2017-05-22 07:45:24.250,1.209913,4.0,0.12521915371023162,0.21013900000000008,3.5,5.25,1.1032297500000003
|
||||
2017-05-22 07:45:33.125,1.1778795,3.625,0.019288312412006903,0.08584950000000013,1.0,3.375,0.28974206250000045
|
||||
2017-05-22 07:45:37.000,1.536751,2.125,0.30655783735363684,0.382576,1.25,2.75,1.052084
|
||||
2017-05-22 07:45:51.125,1.6725720000000002,2.625,0.3439185714022184,0.420277,,,
|
||||
2017-05-22 07:46:39.250,1.36249,1.875,0.06455079234815564,0.06726999999999994,0.625,2.0,0.13453999999999988
|
||||
2017-05-22 07:46:43.375,1.234357,1.125,0.36833145242077237,0.09738099999999994,0.375,1.5,0.1460714999999999
|
||||
2017-05-22 07:46:54.625,1.300345,2.125,0.302864053208582,0.23256100000000002,,,
|
||||
2017-05-22 08:05:41.125,0.227703,1.75,0.0699073493119271,0.033954999999999985,2.875,4.5,0.15279749999999992
|
||||
2017-05-22 08:07:34.625,0.20976450000000002,1.875,0.06528087959205453,0.04164350000000003,,,
|
||||
2017-05-22 08:14:31.375,0.42438750000000003,4.0,0.04608763734814758,0.13774350000000002,2.125,5.0,0.6887175000000001
|
||||
2017-05-22 08:16:05.625,0.43848200000000004,1.875,0.022908910049771247,0.02050099999999999,0.875,2.25,0.04612724999999998
|
||||
2017-05-22 08:16:31.750,0.421825,2.375,0.020235005129130013,0.0262675,1.375,3.0,0.0788025
|
||||
2017-05-22 08:17:08.375,0.442967,4.0,0.027462685910427886,0.05637900000000001,,,
|
||||
2017-05-22 08:17:23.625,0.48589150000000003,1.875,0.022108622083991225,0.021782500000000038,1.0,2.375,0.05173343750000009
|
||||
2017-05-22 08:17:41.375,0.47371850000000004,1.875,0.020662833646469814,0.02114149999999998,1.25,2.625,0.05549643749999995
|
||||
2017-05-22 08:17:57.000,0.570459,3.75,0.09028796294871899,0.12172600000000006,,,
|
||||
2017-05-22 08:18:15.000,0.752408,4.0,0.09074689036959427,0.20373099999999988,,,
|
||||
2017-05-22 08:18:18.125,0.776113,1.375,0.045155390005493956,0.022423000000000082,0.5,1.625,0.03643737500000013
|
||||
2017-05-22 08:18:41.375,0.7703470000000001,1.25,0.15855556460387632,0.05830050000000009,0.375,1.5,0.08745075000000013
|
||||
2017-05-22 08:20:34.750,0.6652779999999999,2.125,0.05843900549537295,0.06598899999999996,1.75,3.375,0.22271287499999987
|
||||
2017-05-22 08:20:47.000,0.628119,2.625,0.028209886616290092,0.02818949999999998,1.0,2.75,0.07752112499999994
|
||||
2017-05-22 08:21:05.125,0.633885,2.0,0.04517072457773352,0.05125350000000006,3.125,4.5,0.23064075000000028
|
||||
2017-05-22 08:21:16.500,0.6434949999999999,2.125,0.03412071629378932,0.030111499999999847,1.25,3.0,0.09033449999999954
|
||||
2017-05-22 08:22:17.250,0.674247,2.5,0.10312026697879695,0.11531999999999998,,,
|
||||
2017-05-22 08:22:35.000,0.690904,2.125,0.027065625284675043,0.021141999999999994,0.625,2.25,0.04756949999999999
|
||||
2017-05-22 08:22:48.000,0.697311,2.625,0.03720895432743454,0.049331000000000014,,,
|
||||
2017-05-22 08:22:53.250,0.708843,2.25,0.035330474875114426,0.03459599999999996,,,
|
||||
2017-05-22 08:23:02.000,0.748564,2.0,0.03098284779636007,0.02434500000000006,1.125,2.625,0.06390562500000016
|
||||
2017-05-22 08:23:37.000,0.761378,1.625,0.02820908045797399,0.02050149999999995,0.875,2.125,0.043565687499999894
|
||||
2017-05-22 08:23:45.375,0.9170595,2.5,0.10900167444209874,0.16785450000000002,2.125,4.0,0.6714180000000001
|
||||
2017-05-22 08:23:52.125,0.877979,2.25,0.046180062567243496,0.04869099999999993,0.875,2.25,0.10955474999999984
|
||||
2017-05-22 08:23:58.000,0.902324,2.0,0.0451624964223063,0.049972000000000016,1.0,2.375,0.11868350000000004
|
||||
2017-05-22 08:24:01.875,0.9817665,1.875,0.12067163327367236,0.10378749999999992,1.875,3.25,0.33730937499999974
|
||||
2017-05-22 08:24:27.250,1.0369329999999999,1.875,0.10883694564521296,0.07047349999999997,1.375,2.875,0.20261131249999992
|
||||
2017-05-22 08:24:48.875,1.015791,3.25,0.05283368025782487,0.10250700000000013,2.375,4.625,0.4740948750000006
|
||||
2017-05-22 08:24:54.125,1.0061805000000001,2.5,0.034086601379224035,0.03972100000000023,,,
|
||||
2017-05-22 08:24:57.250,1.142002,2.25,0.14365102798932483,0.14222800000000002,2.625,4.25,0.6044690000000001
|
||||
2017-05-22 08:25:19.375,1.1118905,2.75,0.05343529253432422,0.05894149999999998,0.875,2.5,0.14735374999999995
|
||||
2017-05-22 08:25:36.375,1.146487,3.125,0.06381041996704617,0.14799399999999996,2.0,4.25,0.6289744999999998
|
||||
2017-05-22 08:26:00.875,1.089467,2.375,0.10804115398923564,0.12428899999999987,1.625,3.25,0.4039392499999996
|
||||
2017-05-22 08:26:23.875,1.058075,1.875,0.04895165908758514,0.03267400000000009,0.375,1.75,0.05717950000000016
|
||||
2017-05-22 08:26:29.750,1.07281,1.625,0.03252146988782734,0.02434549999999991,0.75,2.125,0.051734187499999806
|
||||
2017-05-22 08:26:46.750,1.062559,1.875,0.04287899744724122,0.04420550000000012,1.375,2.75,0.12156512500000033
|
||||
2017-05-22 08:26:53.750,1.033089,2.5,0.02030497122408903,0.026907999999999932,3.375,5.125,0.13790349999999965
|
||||
2017-05-22 08:27:10.625,1.109328,4.0,0.08325412668487431,0.08777150000000011,1.375,2.875,0.2523430625000003
|
||||
2017-05-22 08:27:22.125,1.1714725000000001,3.5,0.1102682429606805,0.1313365000000002,3.5,5.125,0.6730995625000009
|
||||
2017-05-22 08:27:34.500,1.106125,2.125,0.049404017703517766,0.04484700000000008,1.5,3.0,0.13454100000000024
|
||||
2017-05-22 08:28:11.000,1.175317,2.375,0.0767191386014403,0.08649050000000003,1.625,3.375,0.2919054375000001
|
||||
2017-05-22 08:28:15.750,1.161222,2.125,0.023469816031331803,0.031392499999999934,1.625,3.125,0.0981015624999998
|
||||
2017-05-22 08:28:40.000,1.131751,2.125,0.021426131696090422,0.026907999999999932,2.5,4.0,0.10763199999999973
|
||||
2017-05-22 08:28:45.625,1.231695,2.375,0.10967664002624034,0.11147599999999991,,,
|
||||
2017-05-22 08:29:05.250,1.254759,1.75,0.11844124124046118,0.10763199999999995,3.375,4.625,0.49779799999999974
|
||||
2017-05-22 08:29:16.500,1.2252889999999999,2.75,0.04090415106637302,0.06406699999999987,2.0,3.75,0.24025124999999953
|
||||
2017-05-22 08:29:47.875,1.2560405,2.25,0.06433835805311716,0.06662899999999983,2.25,4.0,0.2665159999999993
|
||||
2017-05-22 08:29:55.000,1.317544,2.0,0.11189364771460575,0.09353699999999998,,,
|
||||
2017-05-22 08:30:06.250,1.231695,2.5,0.0815369875816696,0.047409000000000034,0.125,2.0,0.09481800000000007
|
||||
2017-05-22 08:30:33.500,0.8972680000000001,1.5,0.20104111662310498,0.05894200000000005,1.125,2.375,0.13998725000000012
|
||||
2017-05-22 08:30:38.750,0.9997739999999999,2.25,0.46054202442366776,0.29086199999999995,,,
|
||||
2017-05-22 08:32:36.750,0.656308,2.625,0.054126330081166074,0.07431699999999997,2.25,4.125,0.30655762499999983
|
||||
2017-05-22 08:33:43.500,0.594804,1.875,0.07805246554195655,0.051253000000000104,1.25,2.5,0.12813250000000026
|
||||
2017-05-22 08:36:47.625,0.319959,3.125,0.037886110588743804,0.045487,0.625,2.5,0.1137175
|
||||
2017-05-22 08:36:51.500,0.402605,1.25,0.0822805602175376,0.02947099999999997,0.375,1.375,0.04052262499999996
|
||||
2017-05-22 08:38:24.125,0.6838569999999999,3.125,0.011308014179868486,0.03908099999999992,,,
|
||||
2017-05-22 08:38:46.125,0.636448,1.0,0.261456682334547,0.02434550000000002,0.125,1.125,0.027388687500000022
|
||||
2017-05-22 08:39:22.625,0.553802,1.375,0.22335943852213358,0.02050099999999999,0.125,1.25,0.02562624999999999
|
||||
2017-05-22 08:41:21.750,0.424387,4.0,0.004364841866654867,0.021782000000000024,,,
|
||||
2017-05-22 08:45:52.875,0.2539705,2.125,0.03486080607122055,0.024345499999999992,0.25,1.625,0.03956143749999999
|
||||
2017-05-22 08:49:09.875,0.5993580000000001,1.625,1.5722707914359493,0.34026200000000006,0.125,1.25,0.4253275000000001
|
||||
2017-05-22 08:57:45.625,0.1143055,1.875,0.05009238869002808,0.026908500000000002,0.75,1.875,0.050453437500000003
|
||||
2017-05-22 08:58:12.750,0.124556,2.25,0.03482288095641184,0.042284,,,
|
||||
2017-05-22 08:59:58.250,0.123275,1.5,0.04808163407584454,0.03203399999999999,0.625,1.875,0.060063749999999985
|
||||
2017-05-22 09:00:08.625,0.1732465,1.0,0.1155253863671517,0.02370449999999999,1.25,2.25,0.053335124999999976
|
||||
2017-05-22 09:00:31.000,0.251408,3.25,0.043829239927442254,0.07047300000000004,2.5,4.375,0.3083193750000002
|
||||
2017-05-22 09:00:58.000,0.221937,4.0,0.01431226024740595,0.032033000000000006,,,
|
||||
2017-05-22 09:01:11.750,0.232188,2.5,0.02680488117904911,0.02306400000000003,3.125,4.625,0.10667100000000013
|
||||
2017-05-22 09:02:07.250,0.21424899999999997,1.875,0.021082079263371023,0.02242299999999997,0.875,2.25,0.050451749999999934
|
||||
2017-05-22 09:02:16.750,0.230906,2.375,0.03262723948795654,0.02882949999999998,0.75,2.5,0.07207374999999995
|
||||
2017-05-22 09:02:56.125,0.240516,2.125,0.03661649007111234,0.033954999999999985,1.125,2.75,0.09337624999999997
|
||||
2017-05-22 09:04:57.125,0.3609615,4.0,0.05488506385313352,0.1845115,,,
|
||||
2017-05-22 09:05:38.625,0.26998700000000003,3.125,0.009294932942037093,0.03651800000000002,3.875,6.375,0.23280225000000015
|
||||
2017-05-22 09:06:09.125,0.29497300000000004,3.375,0.01571755350604942,0.06534800000000004,,,
|
||||
2017-05-22 09:08:43.625,0.33021,1.125,0.30492121093270885,0.07239600000000002,0.125,1.125,0.08144550000000002
|
||||
2017-05-22 09:08:52.375,0.405808,1.25,0.06686000727952646,0.028829999999999967,1.0,2.125,0.06126374999999993
|
||||
2017-05-22 09:12:10.125,0.20399849999999997,4.0,0.02904607966296413,0.04036149999999997,0.25,3.25,0.13117487499999989
|
||||
2017-05-22 09:12:16.625,0.25909550000000003,1.5,0.05538791361833395,0.028189500000000034,1.75,3.0,0.0845685000000001
|
||||
2017-05-22 09:22:03.375,0.167481,1.875,0.04229639383065775,0.030111999999999972,,,
|
||||
2017-05-22 09:23:03.000,0.39721300000000004,1.0,0.09950268516068572,0.023064000000000084,0.25,1.25,0.028830000000000106
|
||||
2017-05-22 09:23:56.375,0.42438750000000003,2.25,0.05731338537437081,0.07559850000000001,,,
|
||||
2017-05-22 09:24:08.875,0.439123,2.875,0.05135528153867819,0.059582000000000024,3.375,5.25,0.3128055000000001
|
||||
2017-05-22 09:26:05.625,0.7614465,1.375,1.1177070991499694,0.44469050000000004,2.125,3.25,1.445244125
|
||||
2017-05-22 09:26:27.125,0.473078,1.875,0.03726277291054325,0.023063999999999973,2.0,3.125,0.07207499999999992
@@ -1,5 +0,0 @@
*.pyc
.idea
env-python2
env-python3
tests
@@ -1,309 +0,0 @@
import numpy as np
import pandas as pd
import scipy.signal as scisig
import os
import matplotlib.pyplot as plt

from load_files import getInputLoadFile, getOutputPath, get_user_input

DEBUG = True

SAMPLING_RATE = 8

ONE_MINUTE_S = 60
THIRTY_MIN_S = ONE_MINUTE_S * 30
SECONDS_IN_DAY = 24 * 60 * 60

STILLNESS_MOTION_THRESHOLD = .1
PERCENT_STILLNESS_THRESHOLD = .95

STEP_DIFFERENCE_THRESHOLD = 0.3


def computeAllAccelerometerFeatures(data, time_frames):
    if DEBUG: print("\t\tcomputing motion...")
    motion = computeMotion(data['AccelX'], data['AccelY'], data['AccelZ'])

    if DEBUG: print("\t\tcomputing steps...")
    steps = computeSteps(motion)

    if DEBUG: print("\t\tcomputing stillness...")
    stillness = computeStillness(motion)

    features = []

    for time_frame in time_frames:
        start = time_frame[0]
        end = time_frame[1]
        # the stillness signal is 1 Hz while motion/steps run at SAMPLING_RATE,
        # so its indices have to be scaled down accordingly
        start1Hz = int(start / SAMPLING_RATE)
        end1Hz = end if end == -1 else int(end / SAMPLING_RATE)
        if DEBUG: print("\t\tcomputing features for time frame. Start index: " + str(start) + " end index: " + str(end))

        time_frame_feats = computeAccelerometerFeaturesOverOneTimeFrame(motion[start:end],
                                                                        steps[start:end],
                                                                        stillness[start1Hz:end1Hz])
        features.append(time_frame_feats)

    return features, steps, motion


def computeMotion(acc1, acc2, acc3):
    '''Aggregates the 3-axis accelerometer signal into a single motion signal'''
    return np.sqrt(np.array(acc1)**2 + np.array(acc2)**2 + np.array(acc3)**2)


def computeSteps(motion):
    '''Determines the location of steps from the aggregated accelerometer signal.
    The signal is low-pass filtered, then minima are located in it. For each
    minimum, if the maximum absolute derivative (first difference) immediately
    surrounding it is greater than a threshold, it is counted as a step.

    Args:
        motion: root mean squared 3-axis acceleration
    Returns:
        steps: binary array at 8 Hz which is 1 everywhere there is a step'''

    filtered_signal = filterSignalFIR(motion, 2, 256)
    diff = filtered_signal[1:] - filtered_signal[:-1]

    mins = scisig.argrelextrema(filtered_signal, np.less)[0]

    steps = [0] * len(filtered_signal)
    for m in mins:
        if m <= 4 or m >= len(diff) - 4:
            continue
        if max(abs(diff[m - 4:m + 4])) > STEP_DIFFERENCE_THRESHOLD:
            steps[m] = 1.0

    return steps


def filterSignalFIR(eda, cutoff=0.4, numtaps=64):
    # design a windowed-sinc FIR low-pass filter; the cutoff is expressed
    # as a fraction of the Nyquist frequency
    f = cutoff / (SAMPLING_RATE / 2.0)
    FIR_coeff = scisig.firwin(numtaps, f)

    return scisig.lfilter(FIR_coeff, 1, eda)


def computeStillness(motion):
    '''Locates periods in which the person is still or motionless.
    The total acceleration must be less than a threshold for 95 percent of one
    minute in order for that minute to count as still.

    Args:
        motion: an array containing the root mean squared acceleration
    Returns:
        A 1 Hz array that is 1 for each second belonging to a still period, 0 otherwise
    '''
    diff = motion[1:] - motion[:-1]
    momentary_stillness = diff < STILLNESS_MOTION_THRESHOLD
    # keep the array the same size as the full-day signal (np.append returns a
    # new array; the original code discarded its result)
    momentary_stillness = np.append(momentary_stillness, 0)
    num_minutes_in_day = 24 * 60

    # create an array indicating whether the person was still for each second of
    # the day; to be still, the momentary_stillness signal must be true for more
    # than 95% of the minute containing that second
    second_stillness = [0] * SECONDS_IN_DAY

    for i in range(num_minutes_in_day):
        hours_start = int(i / 60)
        mins_start = i % 60
        hours_end = int((i + 1) / 60)
        mins_end = (i + 1) % 60

        start_idx = getIndexFromTimestamp(hours_start, mins_start)
        end_idx = getIndexFromTimestamp(hours_end, mins_end)

        this_minute = momentary_stillness[start_idx:end_idx]
        minute_stillness = sum(this_minute) > PERCENT_STILLNESS_THRESHOLD * (60 * SAMPLING_RATE)

        second_idx = int(start_idx / 8)
        for si in range(second_idx, second_idx + 60):
            second_stillness[si] = float(minute_stillness)

    return second_stillness


def computeAccelerometerFeaturesOverOneTimeFrame(motion, steps, stillness):
    '''Computes all available features for a time period. Incoming signals are
    assumed to be from only that time period.

    Args:
        motion: 8 Hz root mean squared 3-axis acceleration
        steps: 8 Hz binary signal that is 1 if there is a step
        stillness: 1 Hz signal that is 1 if the person was still during that second, 0 otherwise
    Returns:
        A list of features containing (in order):
        - step count: number of steps detected
        - mean step time during movement: average number of samples between two
          steps (aggregated first to 1 minute, then averaged over only the parts
          of the signal occurring during movement)
        - percent stillness: percentage of time the person spent nearly motionless
    '''

    features = []

    features.extend(computeStepFeatures(steps, stillness))
    features.append(countStillness(stillness))

    return features


def computeStepFeatures(steps, stillness):
    '''Counts the total number of steps over a given period, as well as the
    average time between steps (meant to approximate walking speed).

    Args:
        steps: a binary array at 8 Hz that is 1 every time there is a step
    Returns:
        sum: the number of steps in the period
        mean time: average number of samples between two steps'''

    sum_steps = float(sum(steps))

    step_indices = np.nonzero(steps)[0]
    diff = step_indices[1:] - step_indices[:-1]

    # keep the step-difference array the same length as the signal so we can
    # preserve the actual locations of the step differences
    timed_step_diff = np.empty(len(steps)) * np.nan
    timed_step_diff[step_indices[:len(diff)]] = diff

    signal_length_1s = len(stillness)
    signal_length_1min = int(signal_length_1s / 60)

    # if there aren't enough steps during this period, we cannot accurately compute the mean step difference
    if len(timed_step_diff) < signal_length_1min:
        return [sum_steps, np.nan]

    agg_stillness = aggregateSignal(stillness, signal_length_1min, 'max')
    agg_step_diff = aggregateSignal(timed_step_diff, signal_length_1min, 'mean')

    movement_indices = [i for i in range(len(agg_stillness)) if agg_stillness[i] == 0.0]
    step_diff_during_movement = agg_step_diff[movement_indices]

    return [sum_steps, round(np.nanmean(step_diff_during_movement), 10)]


def countStillness(stillness):
    '''Counts the fraction of time spent still over a period.

    Args:
        stillness: a binary array at 1 Hz that is 1 if that second is part of a still period
    Returns:
        the fraction of the period spent still'''

    return float(sum(stillness)) / float(len(stillness))


def aggregateSignal(signal, new_signal_length, agg_method='sum'):
    new_signal = np.zeros(new_signal_length)
    samples_per_bucket = int(len(signal) / new_signal_length)

    # the new signal length must be small enough that there is at least 1 sample per bucket
    assert samples_per_bucket > 0

    for i in range(new_signal_length):
        if agg_method == 'sum':
            new_signal[i] = np.nansum(signal[i * samples_per_bucket:(i + 1) * samples_per_bucket])
        elif agg_method == 'percent':
            new_signal[i] = np.nansum(signal[i * samples_per_bucket:(i + 1) * samples_per_bucket]) / samples_per_bucket
        elif agg_method == 'mean':
            new_signal[i] = np.nanmean(signal[i * samples_per_bucket:(i + 1) * samples_per_bucket])
        elif agg_method == 'max':
            new_signal[i] = np.nanmax(signal[i * samples_per_bucket:(i + 1) * samples_per_bucket])
    return new_signal
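

# --- Illustrative sketch (not part of the original file) -------------------
# A minimal check of aggregateSignal: 8 samples folded into 2 buckets of 4,
# once as sums and once as means; the NaN-aware reducers skip the NaN.
def _demo_aggregateSignal():
    sig = np.array([1.0, 1.0, 1.0, 1.0, 2.0, 2.0, np.nan, 2.0])
    assert list(aggregateSignal(sig, 2, 'sum')) == [4.0, 6.0]    # nansum skips the NaN
    assert list(aggregateSignal(sig, 2, 'mean')) == [1.0, 2.0]   # nanmean likewise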


def getIndexFromTimestamp(hours, mins=0):
    return ((hours * 60) + mins) * 60 * SAMPLING_RATE


def inputTimeFrames():
    '''Allows the user to choose the time frames over which to compute accelerometer features.'''

    time_frames = []
    print("Accelerometer features can be extracted over different time periods.")
    cont = get_user_input("If you would like to enter a time period over which to compute features, enter 'y', or press enter to compute features over the entire file.")
    while cont == 'y' or cont == 'Y':
        start = int(get_user_input("Enter the starting hour of the time period (hour 0 is when the file starts):"))
        end = int(get_user_input("Enter the ending hour of the time period (hour 0 is when the file starts; use -1 for the end of the file):"))
        start = getIndexFromTimestamp(int(start))
        if end != -1:
            end = getIndexFromTimestamp(int(end))
        time_frames.append([start, end])
        print("Great! Now computing features for the following time periods:" + str(time_frames))
        cont = get_user_input("To add another time period, enter 'y'. To finish, press enter.")

    if len(time_frames) == 0:
        time_frames = [[0, -1]]  # the whole file

    return time_frames


def saveFeaturesToFile(features, time_frames, output_file):
    of = open(output_file, 'w')
    of.write("Time period start hour, Time period end hour, Step count, Mean step time during movement, Percent stillness\n")
    tf_i = 0
    for tf in time_frames:
        output_str = str(tf[0]) + ' , ' + str(tf[1])
        for feat in features[tf_i]:
            output_str += ' , ' + str(feat)
        tf_i += 1
        of.write(output_str + '\n')
    of.close()
    print("Saved features to file " + output_file)


# draws a graph of the data with the detected steps marked on it;
# assumes that the 'data' dataframe already contains the 'steps' column
def plotSteps(data, x_seconds, sampleRate=SAMPLING_RATE):
    if x_seconds:
        time_m = np.arange(0, len(data)) / float(sampleRate)
        realign = 128 / (sampleRate)
    else:
        time_m = np.arange(0, len(data)) / (sampleRate * 60.)
        realign = 128 / (sampleRate * 60.)

    data_min = data['motion'].min()
    data_max = data['motion'].max()

    # plot the data with the detected steps marked
    plt.figure(1, figsize=(20, 5))

    plt.plot(time_m, data['motion'])

    for i in range(len(data)):
        if data.iloc[i]["steps"] == 1:
            x_loc = time_m[i] - realign
            plt.plot([x_loc, x_loc], [data_min, data_max], "k")
    step_height = data_max * 1.15
    # data['steps_plot'] = data['steps'] * step_height
    # plt.plot(time_m, data['steps_plot'], 'k')

    plt.xlim([0, time_m[-1]])
    plt.ylim([data_min - .1, data_max + .1])
    plt.title('Motion with Detected "Steps" marked')
    plt.ylabel('g')
    if x_seconds:
        plt.xlabel('Time (s)')
    else:
        plt.xlabel('Time (min)')

    plt.show()


if __name__ == "__main__":
    print("This script will extract features related to accelerometer data.")

    data, filepath_confirm = getInputLoadFile()

    output_path = getOutputPath()

    time_frames = inputTimeFrames()

    features, steps, motion = computeAllAccelerometerFeatures(data, time_frames)

    data["steps"] = steps
    data["motion"] = motion

    saveFeaturesToFile(features, time_frames, output_path)

    print("")
    plot_ans = get_user_input("Do you want to plot the detected steps? (y/n): ")
    if 'y' in plot_ans:
        secs_ans = get_user_input("Would you like the x-axis to be in seconds or minutes? (sec/min): ")
        if 'sec' in secs_ans:
            x_seconds = True
        else:
            x_seconds = False
        plotSteps(data, x_seconds)
    else:
        print("\tOkay, the script will not produce a plot")
@ -1,601 +0,0 @@
|
|||
import numpy as np
from sklearn.metrics.pairwise import rbf_kernel


def predict_binary_classifier(X):
    '''
    X: num test data by 13 features
    '''
    # Get params
    params = binary_classifier()

    # compute kernel between every support vector and every test point
    K = rbf_kernel(params['support_vec'], X, gamma=params['gamma'])

    # Prediction = sign((sum_{i=1}^n y_i*alpha_i*K(x_i, x)) + rho)
    predictions = np.zeros(X.shape[0])
    for i in range(X.shape[0]):
        predictions[i] = np.sign(np.sum(params['dual_coef'] * K[:, i]) + params['intercept'])

    return predictions
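
# Note: the per-sample loop above can be collapsed into one vectorized call; a
# minimal equivalent sketch (same params and K as above; not in the original):
#   predictions = np.sign(params['dual_coef'] @ K + params['intercept']).ravel()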


def predict_multiclass_classifier(X):
    '''
    X: num test data by 10 features
    '''
    # Get params
    params = multiclass_classifier()

    K = rbf_kernel(params['support_vec'], X, gamma=params['gamma'])

    # define the start and end index for support vectors for each class
    nv = params['num_support_vec']
    start = [sum(nv[:i]) for i in range(len(nv))]
    end = [start[i] + nv[i] for i in range(len(nv))]

    # calculate: sum(a_p * k(x_p, x)) between every 2 classes
    dual_coef = params['dual_coef'].T
    predictions_0_1 = np.zeros(X.shape[0])
    for i in range(X.shape[0]):
        temp_prediction = np.sum(dual_coef[start[0]:end[0], 0] * K[start[0]:end[0], i]) + \
            np.sum(dual_coef[start[1]:end[1], 0] * K[start[1]:end[1], i]) + params['intercept'][0]
        predictions_0_1[i] = 0 if temp_prediction > 0 else 1

    predictions_0_2 = np.zeros(X.shape[0])
    for i in range(X.shape[0]):
        temp_prediction = np.sum(dual_coef[start[0]:end[0], 1] * K[start[0]:end[0], i]) + \
            np.sum(dual_coef[start[2]:end[2], 0] * K[start[2]:end[2], i]) + params['intercept'][1]
        predictions_0_2[i] = 0 if temp_prediction > 0 else 2

    predictions_1_2 = np.zeros(X.shape[0])
    for i in range(X.shape[0]):
        temp_prediction = np.sum(dual_coef[start[1]:end[1], 1] * K[start[1]:end[1], i]) + \
            np.sum(dual_coef[start[2]:end[2], 1] * K[start[2]:end[2], i]) + params['intercept'][2]
        predictions_1_2[i] = 1 if temp_prediction > 0 else 2

    decision_function = np.vstack([predictions_0_1, predictions_0_2, predictions_1_2]).T

    # Majority Vote to find the best class
    predictions = np.zeros(X.shape[0])
    for i in range(X.shape[0]):
        lst = decision_function[i, :].tolist()
        predictions[i] = max(set(lst), key=lst.count) - 1

    return predictions
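
# Worked example of the one-vs-one vote above: pairwise winners [0, 2, 2] give
# class 2 by majority, and the final "- 1" shifts the stored labels {0, 1, 2}
# onto the {-1, 0, 1} scale this script reports.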


def binary_classifier():
    gamma = 0.1

    # dual coef = y_i*alpha_i
    dual_coef = np.array([[-1.12775599e+02, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-4.65947457e+02, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.17935400e+02, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -2.92534132e+02, -1.00000000e+03,
-1.00000000e+03, -3.69965631e+01, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, -1.00000000e+03,
-1.00000000e+03, -1.00000000e+03, 1.00000000e+03,
1.00000000e+03, 1.00000000e+03, 1.00000000e+03,
7.92366387e+02, 3.00553142e+02, 2.22950860e-01,
1.00000000e+03, 1.00000000e+03, 5.58636056e+02,
1.21751544e+02, 1.00000000e+03, 1.00000000e+03,
2.61920652e+00, 9.96570403e+02, 1.00000000e+03,
1.00000000e+03, 1.00000000e+03, 1.00000000e+03,
1.00000000e+03, 1.00000000e+03, 1.02270060e+02,
5.41288840e+01, 1.91650287e+02, 1.00000000e+03,
1.00000000e+03, 1.00000000e+03, 1.00000000e+03,
1.00000000e+03, 2.45152637e+02, 7.53766346e+02,
1.00000000e+03, 1.00000000e+03, 3.63211198e+00,
1.00000000e+03, 3.31675798e+01, 5.64620367e+02,
1.00000000e+03, 1.00000000e+03, 1.00000000e+03,
2.66900636e+02, 1.00000000e+03, 6.54763900e+02,
3.38216549e+02, 6.86434772e+01, 2.78998678e+02,
6.97557950e+02, 1.00000000e+03]])

    # intercept = rho
    intercept = np.array([-2.63232929])

    # support vectors = x_i
    support_vec = np.array([[0.02809756, 0.0455, 0.025, 0.00866667, 0.03799132, -0.00799413, 0.01061208, 0.016263, 0.00671743, 0.00572262, 0.00578504, 0.00542415, 0.00318195],
[0.00060976, 0.0035, 0.007, 0.00087179, 0.00024191, -0.0005069, 0.0005069, 0.0070711, 0.00306413, 0.0031833, 0.0107827, 0.0066959, 0.0022981],
[3.49731707, 0.092, 0.054, 0.01923077, 3.53815367, -0.02236652, 0.02659884, 0.062225, 0.0316782, 0.01818914, 0.06607571, 0.03342241, 0.099702],
[2.52643902, 0.058, 0.055, 0.0114359, 2.54031008, -0.01070662, 0.01296803, 0.043134, 0.01649923, 0.01579683, 0.03326171, 0.05004163, 0.013965],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -2.74622599e-18, -2.42947453e-17, 3.36047450e-17, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],
[3.89758537, 0.167, 0.27, 0.06717949, 3.87923565, -0.04130143, 0.05403825, 0.047376, 0.0328098, 0.01255584, 0.03676955, 0.14237773, 0.11031],
[0.93326829, 0.0855, 0.106, 0.01169231, 0.92669874, -0.02740927, 0.02740927, 0.043841, 0.01131377, 0.01595008, 0.0231871, 0.02414775, 0.0139655],
[4.64253659, 0.106, 0.13, 0.03661538, 4.63806066, -0.03168223, 0.03168223, 0.10182, 0.0559785, 0.03369301, 0.06341563, 0.08583294, 0.0251025],
[0.29312195, 0.028, 0.039, 0.00682051, 0.28575076, -0.00648365, 0.00648365, 0.0056569, 0.00367694, 0.00126494, 0.00364005, 0.01814984, 0.006364],
[3.08187805, 0.0615, 0.123, 0.03435897, 3.11862292, -0.02260403, 0.02260403, 0.053033, 0.0397394, 0.01570345, 0.0338851, 0.10069204, 0.16652],
[2.43902439e-05, 5.00000000e-04, 1.00000000e-03, 1.02564103e-04, 2.43769719e-05, -7.19856842e-05, 7.19856842e-05, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -4.05052739e-10, -2.77557303e-09, 5.77955577e-09, 7.07110000e-04, 1.17851667e-04, 2.88676449e-04, 2.04124145e-04, 1.44336183e-04, 0.00000000e+00],
[0.83290244, 0.099, 0.172, 0.02610256, 0.82408369, -0.0168393, 0.0168393, 0.13011, 0.02875613, 0.04987211, 0.03786379, 0.02684837, 0.0155565],
[0.92597561, 0.017, 0.009, 0.00369231, 0.92583814, -0.00670974, 0.00670974, 0.012021, 0.00506763, 0.00420523, 0.01259266, 0.0115391, 0.00265165],
[2.43902439e-05, 5.00000000e-04, 1.00000000e-03, 2.56410256e-05, 2.18000765e-04, -5.56411248e-04, 5.56411248e-04, 9.19240000e-03, 2.71058333e-03, 4.25246049e-03, 2.49833278e-03, 7.64311464e-03, 0.00000000e+00],
[0.88760976, 0.0205, 0.022, 0.00489744, 0.88799505, -0.00346772, 0.00461828, 0.011314, 0.00447838, 0.00394135, 0.01327278, 0.01434142, 0.00406585],
[9.21263415, 0.118, 0.472, 0.0695641, 9.19153391, -0.02181738, 0.02181738, 0.16688, 0.07130037, 0.06135461, 0.04328934, 0.04277416, 0.0829085],
[0.48378049, 0.017, 0.026, 0.00794872, 0.48333175, -0.00337375, 0.00350864, 0.016971, 0.0089568, 0.00472601, 0.01168189, 0.01629524, 0.0226275],
[0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 9.65026603e-122, -2.00921455e-120, 4.22507597e-120, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000],
[0.10897561, 0.03, 0.033, 0.00553846, 0.12761266, -0.00442938, 0.00556735, 0.025456, 0.00872107, 0.00870258, 0.01130487, 0.01554551, 0.0123745],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -1.38812548e-09, -2.34438020e-08, 2.34438020e-08, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],
[0.66663415, 0.052, 0.05, 0.00510256, 0.66182973, -0.01361869, 0.01361869, 0.0049497, 0.00296982, 0.00208565, 0.00424264, 0.00961131, 0.012374],
[3.74146341e+00, 6.60000000e-02, 7.00000000e-02, 2.41025641e-02, 3.72790310e+00, -1.65194036e-02, 1.65194036e-02, 2.33350000e-02, 2.29102000e-02, 3.87787571e-04, 7.25086202e-03, 8.04828002e-03, 2.26270000e-02],
[2.43902439e-05, 5.00000000e-04, 1.00000000e-03, 1.02564103e-04, 2.44149661e-05, -7.19856850e-05, 7.19856850e-05, 7.07110000e-04, 1.17851667e-04, 2.88676449e-04, 2.04124145e-04, 1.44336183e-04, 0.00000000e+00],
[1.14713659e+01, 1.68000000e-01, 3.24000000e-01, 8.83589744e-02, 1.13977278e+01, -4.35202063e-02, 4.35202063e-02, 1.20920000e-01, 1.15826000e-01, 5.32593935e-03, 4.29825546e-02, 1.11681949e-01, 1.82080000e-01],
[1.63631707, 0.0825, 0.138, 0.02410256, 1.65473267, -0.02914746, 0.02927458, 0.074953, 0.02899134, 0.03271076, 0.02718317, 0.09610564, 0.012728],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 6.01460518e-42, -2.71490067e-40, 2.71490067e-40, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],
[0.52358537, 0.038, 0.03, 0.00769231, 0.52319376, -0.01066405, 0.01066405, 0.026163, 0.01025307, 0.00912966, 0.02678697, 0.04011893, 0.00866185],
[0.10931707, 0.103, 0.407, 0.04461538, 0.13188551, -0.01686662, 0.02506229, 0.1492, 0.0384195, 0.06327203, 0.06411448, 0.05508901, 0],
[0.0444878, 0.0245, 0.04, 0.00984615, 0.03577326, -0.00573919, 0.00573919, 0.013435, 0.0078961, 0.00418135, 0.01136515, 0.01291603, 0.0134352],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.03127202e-08, -2.56175141e-07, 5.37317466e-07, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 3.27917545e-05, -7.79437718e-04, 7.79437718e-04, 3.04060000e-02, 5.06766667e-03, 1.24131975e-02, 1.34721936e-02, 5.34029589e-02, 0.00000000e+00],
[2.43902439e-05, 5.00000000e-04, 1.00000000e-03, 1.02564103e-04, 2.60691650e-05, -7.19856850e-05, 7.19856850e-05, 7.07110000e-04, 1.17851667e-04, 2.88676449e-04, 2.04124145e-04, 1.44336183e-04, 0.00000000e+00],
[0.46446341, 0.033, 0.03, 0.00933333, 0.46299034, -0.00866364, 0.00866364, 0.033941, 0.01357644, 0.01214903, 0.02164486, 0.02701617, 0.012374],
[5.89978049, 0.117, 0.112, 0.04453846, 5.88525247, -0.02253416, 0.02253416, 0.084146, 0.0492146, 0.01985341, 0.06802812, 0.09041259, 0.045255],
[0.01317073, 0.0195, 0.015, 0.00538462, 0.00829287, -0.00622806, 0.00622806, 0.026163, 0.01145514, 0.00926554, 0.00690652, 0.02540613, 0.018031],
[1.16509756, 0.028, 0.02, 0.01051282, 1.16338281, -0.01379371, 0.01379371, 0.020506, 0.01461345, 0.00563317, 0.01416569, 0.01971055, 0.0281075],
[3.67914634, 0.1235, 0.126, 0.02676923, 3.67052968, -0.04266586, 0.04266586, 0.041719, 0.0233342, 0.0106888, 0.03232337, 0.07260248, 0.050912],
[0.11331707, 0.0015, 0.004, 0.0014359, 0.11329803, -0.00042144, 0.00042144, 0.0021213, 0.0014142, 0.00109543, 0.00124164, 0.00053231, 0.00070713],
[1.11256098, 0.026, 0.016, 0.00561538, 1.09093248, -0.00174647, 0.00490015, 0.02192, 0.01272782, 0.00816993, 0.02111102, 0.04921207, 0.012021],
[0.06846341, 0.007, 0.01, 0.00307692, 0.06774886, -0.00179795, 0.00190969, 0.0056569, 0.00311126, 0.00162791, 0.00195576, 0.00721732, 0.01096],
[1.16454634e+01, 1.78500000e-01, 3.20000000e-01, 8.94615385e-02, 1.15869935e+01, -1.15451745e-02, 1.59897956e-02, 1.37890000e-01, 1.23393333e-01, 1.01170444e-02, 3.66151153e-02, 1.46607419e-01, 1.94455000e-01],
[3.45158537, 0.1375, 0.052, 0.01676923, 3.44594643, -0.03141983, 0.03141983, 0.038184, 0.0272946, 0.00958649, 0.01698014, 0.06290749, 0.1393],
[3.12563415, 0.0535, 0.111, 0.02897436, 3.17337638, -0.02835417, 0.02835417, 0.054447, 0.0278601, 0.0188188, 0.00755315, 0.03628251, 0.055154],
[8.50975610e-02, 1.00000000e-03, 4.00000000e-03, 8.20512821e-04, 8.50491997e-02, -1.84870042e-04, 2.35933619e-04, 1.41420000e-03, 1.41420000e-03, 2.60312573e-11, 4.08248290e-04, 2.88668284e-04, 7.07110000e-04],
[0.82373171, 0.048, 0.121, 0.01853846, 0.82149219, -0.0053288, 0.00684639, 0.041012, 0.0208598, 0.01423898, 0.02609294, 0.02676908, 0.01078335],
[4.39680488, 0.223, 0.354, 0.09258974, 4.35973108, -0.03206468, 0.03450864, 0.20506, 0.0971572, 0.07235446, 0.13713059, 0.23019854, 0.32138],
[5.66058537, 0.0285, 0.093, 0.01282051, 5.66682734, -0.00633008, 0.00633008, 0.040305, 0.01513214, 0.01889847, 0.01503912, 0.03383458, 0],
[0.13329268, 0.011, 0.021, 0.00338462, 0.13419267, -0.00262455, 0.00262455, 0.0035355, 0.00226272, 0.00092195, 0.00772172, 0.00411547, 0.0038891],
[0.15463415, 0.0325, 0.065, 0.01617949, 0.15422134, -0.00766504, 0.00766504, 0.067882, 0.02286322, 0.02270081, 0.02939288, 0.0224428, 0.017501],
[1.47902439e-01, 1.50000000e-03, 2.00000000e-03, 3.84615385e-04, 1.48269290e-01, -1.36058722e-04, 1.36058722e-04, 2.12130000e-03, 8.24950000e-04, 9.39849132e-04, 5.16397779e-04, 5.91603500e-04, 0.00000000e+00],
[2.76797561, 0.071, 0.17, 0.03212821, 2.84223399, -0.01692731, 0.01692731, 0.04879, 0.03441267, 0.00934515, 0.03221283, 0.05768286, 0.092806],
[1.30939024, 0.044, 0.066, 0.0165641, 1.2967273, -0.01727205, 0.01727205, 0.03182, 0.01456652, 0.01056655, 0.00732632, 0.02987207, 0.038891],
[0.0914878, 0.038, 0.028, 0.00364103, 0.08295897, -0.00877545, 0.00877545, 0.032527, 0.00648182, 0.01277828, 0.01289089, 0.01040763, 0.0042426],
[0.13621951, 0.0015, 0.006, 0.00174359, 0.13689296, -0.00036169, 0.00040731, 0.0021213, 0.00153205, 0.00082663, 0.00058452, 0.00069522, 0.00088391],
[0.05692683, 0.007, 0.006, 0.00189744, 0.05532006, -0.00145672, 0.00145672, 0.0056569, 0.00311126, 0.00184393, 0.00420714, 0.00465287, 0.0070711],
[0.07460976, 0.002, 0.006, 0.00097436, 0.07430141, -0.00035004, 0.00038011, 0.0028284, 0.00113136, 0.0011832, 0.00070711, 0.0005916, 0.00070711],
[0.04782927, 0.006, 0.011, 0.00353846, 0.04406202, -0.00232859, 0.00232859, 0.012021, 0.00438408, 0.00442728, 0.00363318, 0.00540593, 0.0091924],
[4.443, 0.141, 0.076, 0.02310256, 4.40858239, -0.03710778, 0.03710778, 0.03182, 0.0271528, 0.00465324, 0.03506173, 0.07970664, 0.11278],
[8.79678049, 0.057, 0.208, 0.04194872, 8.784878, -0.01132933, 0.01132933, 0.08061, 0.04695182, 0.039817, 0.0405623, 0.01937402, 0.033234],
[2.58236585, 0.063, 0.128, 0.02112821, 2.5705713, -0.0079298, 0.01979542, 0.062225, 0.0309712, 0.02172778, 0.02949491, 0.02741888, 0.02687],
[0.08992683, 0.0015, 0.006, 0.00030769, 0.09000535, -0.00020308, 0.00020308, 0.0021213, 0.00106065, 0.00116188, 0.0007746, 0.00086603, 0.00053035],
[0.09085366, 0.0175, 0.037, 0.00694872, 0.09607742, -0.00456388, 0.00456388, 0.0098995, 0.00523258, 0.00310646, 0.01357571, 0.0133944, 0.0056569],
[1.34473171, 0.0255, 0.022, 0.00953846, 1.37010789, -0.00558419, 0.00558419, 0.030406, 0.0134351, 0.00877511, 0.00929516, 0.03188089, 0.0265165],
[0.14253659, 0.001, 0.004, 0.00097436, 0.14237889, -0.0002998, 0.0002998, 0.0014142, 0.0011785, 0.00057734, 0.0005164, 0.00069521, 0.00106066],
[0.07617073, 0.001, 0.004, 0.00179487, 0.07597272, -0.00025949, 0.00025949, 0.0014142, 0.0011785, 0.00057734, 0.0005164, 0.00063245, 0.00070711],
[0.28502439, 0.0025, 0.01, 0.00241026, 0.28596915, -0.000355, 0.000355, 0.12869, 0.02333393, 0.05162999, 0.0313152, 0.13233722, 0.0044194],
[5.97658537, 0.0645, 0.106, 0.02925641, 5.95365623, -0.01454886, 0.01454886, 0.045962, 0.02913296, 0.02145587, 0.04602717, 0.06410626, 0.053033],
[4.19787805, 0.0405, 0.072, 0.02764103, 4.21230508, -0.01456906, 0.01468492, 0.030406, 0.02206174, 0.01003006, 0.02031748, 0.03873656, 0.034295],
[0.06904878, 0.0025, 0.005, 0.00117949, 0.06819891, -0.00023428, 0.00033805, 0.0035355, 0.00098994, 0.00154918, 0.001, 0.0007071, 0.00070711],
[2.07410488e+01, 1.10000000e-02, 4.40000000e-02, 1.24102564e-02, 2.07288498e+01, -5.11402880e-02, 5.11402880e-02, 1.55560000e-02, 1.55560000e-02, 0.00000000e+00, 5.68037557e-03, 3.17543685e-03, 7.77820000e-03],
[0.15141463, 0.0025, 0.008, 0.00161538, 0.15286961, -0.00066236, 0.00066236, 0.0049497, 0.0021213, 0.00180276, 0.00235584, 0.01268589, 0.0021213],
[1.07970732, 0.0275, 0.046, 0.00725641, 1.0819483, -0.0025949, 0.00261392, 0.026163, 0.00754248, 0.00945165, 0.01400506, 0.00566908, 0.011137],
[1.45278049e+00, 2.50000000e-02, 3.40000000e-02, 8.23076923e-03, 1.46401853e+00, -5.22375992e-03, 7.56803574e-03, 8.48530000e-03, 6.71755000e-03, 1.39641061e-03, 4.14024959e-03, 1.47976972e-02, 2.03295000e-02],
[1.18829268e-01, 1.00000000e-03, 4.00000000e-03, 1.17948718e-03, 1.18657803e-01, -3.33958979e-04, 3.55599268e-04, 1.41420000e-03, 1.41420000e-03, 2.60312573e-11, 6.32455532e-04, 5.32284214e-04, 7.07110000e-04],
[0.09217073, 0.0085, 0.007, 0.00258974, 0.07952256, -0.00104703, 0.00138337, 0.006364, 0.00466692, 0.00203719, 0.00509166, 0.01307342, 0.021213],
[0.06936585, 0.0095, 0.015, 0.00394872, 0.06837444, -0.00205373, 0.00205373, 0.0084853, 0.00296984, 0.0030984, 0.00234521, 0.00419839, 0.0017678],
[5.05807317, 0.049, 0.082, 0.02402564, 5.06327737, -0.01120311, 0.01120311, 0.031113, 0.0239, 0.01338272, 0.01117139, 0.04351642, 0.020506],
[0.26421951, 0.04, 0.068, 0.00902564, 0.2587529, -0.01040894, 0.01040894, 0.025456, 0.01060666, 0.00890233, 0.01111643, 0.04563416, 0.011314],
[3.59336585, 0.0575, 0.054, 0.02094872, 3.58195886, -0.01804095, 0.01838506, 0.043134, 0.0336584, 0.01240579, 0.01683523, 0.04717173, 0.038184],
[1.29187805, 0.026, 0.016, 0.00689744, 1.27916244, -0.00322078, 0.00490015, 0.025456, 0.01032378, 0.00861112, 0.01863263, 0.0636921, 0.038537],
[6.28670732, 0.1245, 0.127, 0.03102564, 6.35501978, -0.01747513, 0.02813757, 0.084146, 0.04690465, 0.0254467, 0.06541464, 0.18275149, 0.15008],
[10.64578049, 0.079, 0.284, 0.04564103, 10.64447668, -0.01946271, 0.01947497, 0.10889, 0.04186, 0.05739752, 0.06891299, 0.05417812, 0.050205],
[3.32470732, 0.092, 0.046, 0.01687179, 3.32977984, -0.02794509, 0.02794509, 0.072125, 0.0288498, 0.02428699, 0.06277798, 0.10343739, 0.061518],
[0.07358537, 0.001, 0.004, 0.00153846, 0.0735262, -0.00027514, 0.00027514, 0.0014142, 0.0009428, 0.00073029, 0.00075277, 0.00053228, 0.00070711]])

    return {'dual_coef': dual_coef,
            'support_vec': support_vec,
            'intercept': intercept,
            'gamma': gamma}
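
# The constants above look like an export of a fitted sklearn SVC. A hypothetical
# sketch of how such parameters can be read off a trained model (the estimator,
# its hyperparameters, and X_train/y_train are assumptions, not the original):
#   from sklearn.svm import SVC
#   clf = SVC(kernel='rbf', gamma=0.1, C=1000).fit(X_train, y_train)
#   dual_coef = clf.dual_coef_
#   support_vec = clf.support_vectors_
#   intercept = clf.intercept_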


def multiclass_classifier():
    gamma = 0.1

    # dual coef = y_i*alpha_i
    dual_coef = np.array([[1.00000000e+02, 0.00000000e+00, 0.00000000e+00, 1.00000000e+02, 2.19164051e-01,
1.00000000e+02, 1.00000000e+02, 0.00000000e+00, 0.00000000e+00, 1.00000000e+02,
2.73972798e+00, 1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 5.78184818e+01,
1.00000000e+02, 1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 4.43824790e+01,
0.00000000e+00, 0.00000000e+00, 8.90021137e+01, 1.00000000e+02, 3.38829336e+01,
1.00000000e+02, 7.35308055e+01, 5.00832282e+01, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 9.04295253e+01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
1.00000000e+02, 1.00000000e+02, 7.37255035e+01, 1.00000000e+02, 0.00000000e+00,
1.00000000e+02, -1.00000000e+02, -4.59726588e+01, -9.10060871e+01, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -0.00000000e+00,
-1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -0.00000000e+00,
-0.00000000e+00, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -2.32473120e-01, -1.00000000e+02, -0.00000000e+00,
-0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-0.00000000e+00, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -0.00000000e+00,
-2.01478019e-01, -1.00000000e+02, -5.32795432e+01, -0.00000000e+00, -0.00000000e+00,
-1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -0.00000000e+00, -0.00000000e+00,
-1.00000000e+02, -2.05233000e+01, -0.00000000e+00, -9.58435547e-02, -0.00000000e+00,
-0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02,
-1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -0.00000000e+00, -0.00000000e+00,
-1.00000000e+02, -0.00000000e+00, -1.14900102e+01, -7.73085905e+01, -1.00000000e+02,
-0.00000000e+00, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02, -0.00000000e+00,
-0.00000000e+00, -8.64770605e+01, -1.00000000e+02, -1.18090663e-01, -1.00000000e+02,
-1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -0.00000000e+00,
-0.00000000e+00, -6.27523608e+01, -0.00000000e+00, -4.38003436e+01, -0.00000000e+00,
-0.00000000e+00, -5.36807440e-02, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02,
-0.00000000e+00, -1.51862509e-01, -2.23505792e+01, -0.00000000e+00, -1.71549400e+00,
-0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00,
-1.00000000e+02, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00,
-6.48908553e+01, -5.45079781e+01, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -4.15526000e+01, -1.00000000e+02, -0.00000000e+00, -0.00000000e+00,
-3.97322757e+01, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02,
-1.00000000e+02, -8.51452564e+01, -1.00000000e+02, -0.00000000e+00, -0.00000000e+00,
-0.00000000e+00, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -1.36707150e+01, -2.28944671e+00, -1.00000000e+02, -1.00000000e+02,
-0.00000000e+00, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02, -9.70237576e+01,
-0.00000000e+00, -1.00000000e+02, -8.98901380e+00, -1.00000000e+02, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02,
-0.00000000e+00, -0.00000000e+00, -0.00000000e+00, -2.31872364e+00, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -6.81207558e+01, -0.00000000e+00, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -1.00000000e+02,
-1.25804913e+01, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02,
-0.00000000e+00, -0.00000000e+00, -5.79636185e+01, -0.00000000e+00, -3.60349193e+01,
-1.00000000e+02, -1.00000000e+02],
[1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 0.00000000e+00,
1.00000000e+02, 0.00000000e+00, 1.22133880e+01, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 0.00000000e+00, 1.00000000e+02,
0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 5.45699567e+01, 0.00000000e+00,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 0.00000000e+00,
1.00000000e+02, 1.00000000e+02, 5.30198194e+01, 1.00000000e+02, 0.00000000e+00,
8.10028022e+01, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 8.57299348e+01,
0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 6.98226850e+00, 1.00000000e+02,
1.00000000e+02, 2.28942244e+00, 1.00000000e+02, 0.00000000e+00, 3.10951756e+00,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 0.00000000e+00, 2.43965458e+01,
5.54247795e+01, 4.89715327e+01, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 8.77648862e-01,
1.41352297e+00, 1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 5.87399500e+01,
1.00000000e+02, 7.89673831e+01, 7.17216921e-01, 7.08622898e+01, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 1.00000000e+02, 0.00000000e+00, 7.08652210e+01,
1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 2.28740165e+00, 1.00000000e+02,
1.00000000e+02, 1.00000000e+02, 6.26644343e+01, 1.51915932e+01, 9.33156003e+01,
1.00000000e+02, 5.73480226e-01, 0.00000000e+00, 0.00000000e+00, 1.00000000e+02,
6.51947143e+01, 0.00000000e+00, 1.00000000e+02, 3.61854680e+01, 1.50700439e+00,
3.93114839e+01, 1.00000000e+02, 1.00000000e+02, 0.00000000e+00, 0.00000000e+00,
1.00000000e+02, 1.62942145e+01, 1.00000000e+02, 1.00000000e+02, 3.65697187e+01,
3.32328741e+01, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 3.84017861e-02, 3.27497129e+00, 1.00000000e+02,
1.00000000e+02, 0.00000000e+00, 1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -7.87287696e+01,
-1.00000000e+02, -2.17133274e+01, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -4.03561653e+01, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00,
-1.00000000e+02, -0.00000000e+00, -0.00000000e+00, -8.73885349e+01, -1.00000000e+02,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00, -1.00000000e+02,
-0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -7.69821289e+01, -0.00000000e+00,
-1.00000000e+02, -8.28241499e+01, -1.00000000e+02, -6.27852100e+00, -8.74723914e+01,
-0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-0.00000000e+00, -0.00000000e+00, -1.00000000e+02, -1.00000000e+02, -0.00000000e+00,
-1.00000000e+02, -1.00000000e+02, -1.00000000e+02, -2.15412985e+01, -0.00000000e+00,
-2.97074994e+01, -9.62658735e+01, -1.00000000e+02, -1.00000000e+02, -1.00000000e+02,
-1.00000000e+02, -0.00000000e+00]])

    # intercept = rho
    intercept = np.array([-0.62674907, 1.31994877, 0.67252991])

    # support vectors = x_i
    support_vec = np.array([[5.49570019e-07, -2.58632551e-07, 3.16229206e-02, 0.00000000e+00, 0.00000000e+00, 7.07110000e-02, 0.00000000e+00, 0.00000000e+00, 5.49570019e-07,-1.79132036e-08],
[0.01061208, -0.00799413, 0.00572262, 0.025, 0.02809756,0.016263, 0.058, 0.00866667, 0.01061208, 0.03799132],
[0.0005069, -0.0005069, 0.0031833, 0.007, 0.00060976,0.0070711, 0.014, 0.00087179, 0.00039282, 0.00024191],
[0.02659884, -0.02236652, 0.01818914, 0.054, 3.49731707,0.062225, 0.063, 0.01923077, 0.02659884, 3.53815367],
[0.1959552, -0.19377234, 0.49935644, 2.567, 0.212,1.2473, 4.086, 0.27730769, 0.1959552, 0.21128449],
[0.01296803, -0.01070662, 0.01579683, 0.055, 2.52643902,0.043134, 0.057, 0.0114359, 0.01296803, 2.54031008],
[0.04634941, -0.03616377, 0.03342396, 0.285, 1.25278049,0.11031, 0.285, 0.05482051, 0.04634941, 1.24439126],
[0.01161685, -0.01161685, 0.01472225, 0.061, 0.00495122,0.036062, 0.131, 0.00758974, 0.00936955, 0.00494698],
[3.36047450e-17, -2.42947453e-17, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 3.36047450e-17,-2.74622599e-18],
[0.05403825, -0.04130143, 0.01255584, 0.27, 3.89758537,0.047376, 0.27, 0.06717949, 0.05403825, 3.87923565],
[0.06322635, -0.0450853, 0.0069893, 0.107, 0.52617073,0.019799, 0.166, 0.01469231, 0.06322635, 0.51547654],
[0.02740927, -0.02740927, 0.01595008, 0.106, 0.93326829,0.043841, 0.106, 0.01169231, 0.01663183, 0.92669874],
[0.02181645, -0.02181645, 0.00031623, 0.045, 0.04685366,0.00070711, 0.045, 0.00605128, 0.01964941, 0.05939633],
[0.03168223, -0.03168223, 0.03369301, 0.13, 4.64253659,0.10182, 0.13, 0.03661538, 0.02589546, 4.63806066],
[0.00648365, -0.00648365, 0.00126494, 0.039, 0.29312195,0.0056569, 0.039, 0.00682051, 0.0043318, 0.28575076],
[0.02260403, -0.02260403, 0.01570345, 0.123, 3.08187805,0.053033, 0.123, 0.03435897, 0.01139526, 3.11862292],
[7.19856842e-05, -7.19856842e-05, 0.00000000e+00, 1.00000000e-03, 2.43902439e-05, 0.00000000e+00, 2.00000000e-03, 1.02564103e-04, 5.59639155e-05, 2.43769719e-05],
[5.77955577e-09, -2.77557303e-09, 2.88676449e-04, 0.00000000e+00, 0.00000000e+00, 7.07110000e-04, 0.00000000e+00, 0.00000000e+00, 5.77955577e-09,-4.05052739e-10],
[0.0168393, -0.0168393, 0.04987211, 0.172, 0.83290244,0.13011, 0.287, 0.02610256, 0.01063438, 0.82408369],
[0.00670974, -0.00670974, 0.00420523, 0.009, 0.92597561,0.012021, 0.012, 0.00369231, 0.00394705, 0.92583814],
[0.04881422, -0.04881422, 0.06039519, 0.128, 0.84173171,0.15274, 0.166, 0.0145641, 0.02705203, 0.83114509],
[5.56411248e-04, -5.56411248e-04, 4.25246049e-03, 1.00000000e-03, 2.43902439e-05, 9.19240000e-03, 1.00000000e-03, 2.56410256e-05, 4.16114259e-04, 2.18000765e-04],
[0.00461828, -0.00346772, 0.00394135, 0.022, 0.88760976,0.011314, 0.022, 0.00489744, 0.00461828, 0.88799505],
[0.02181738, -0.02181738, 0.06135461, 0.472, 9.21263415,0.16688, 0.472, 0.0695641, 0.01361679, 9.19153391],
[0.18064104, -0.18064104, 0.02243327, 0.141, 1.74753659,0.065761, 0.141, 0.02587179, 0.08614869, 1.89288442],
[0.04502684, -0.04502684, 0.03092595, 0.075, 0.66726829,0.070004, 0.075, 0.01507692, 0.02703687, 0.6524218 ],
[0.00350864, -0.00337375, 0.00472601, 0.026, 0.48378049,0.016971, 0.038, 0.00794872, 0.00350864, 0.48333175],
[4.22507597e-120, -2.00921455e-120, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 0.00000000e+000, 4.22507597e-120, 9.65026603e-122],
[0.0511954, -0.0511954, 0.00917865, 0.032, 1.50741463,0.028284, 0.056, 0.00958974, 0.01612377, 1.43087637],
[0.0504243, -0.03682241, 0.03693438, 0.147, 3.4434878,0.086267, 0.147, 0.02051282, 0.0504243, 3.49633275],
[0.00556735, -0.00442938, 0.00870258, 0.033, 0.10897561,0.025456, 0.033, 0.00553846, 0.00556735, 0.12761266],
[2.34438020e-08, -2.34438020e-08, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.15475168e-08,-1.38812548e-09],
[0.01361869, -0.01361869, 0.00208565, 0.05, 0.66663415,0.0049497, 0.05, 0.00510256, 0.0087315, 0.66182973],
[0.00526982, -0.00287197, 0.016589, 0.06, 0.02595122,0.042426, 0.06, 0.00515385, 0.00526982, 0.02530658],
[1.65194036e-02, -1.65194036e-02, 3.87787571e-04, 7.00000000e-02, 3.74146341e+00, 2.33350000e-02, 1.06000000e-01, 2.41025641e-02, 1.58008422e-02, 3.72790310e+00],
[7.19856850e-05, -7.19856850e-05, 2.88676449e-04, 1.00000000e-03, 2.43902439e-05, 7.07110000e-04, 2.00000000e-03, 1.02564103e-04, 5.59639159e-05, 2.44149661e-05],
[4.35202063e-02, -4.35202063e-02, 5.32593935e-03, 3.24000000e-01, 1.14713659e+01, 1.20920000e-01, 3.24000000e-01, 8.83589744e-02, 3.31111507e-02, 1.13977278e+01],
[0.0767267, -0.0767267, 0.07121201, 0.446, 8.87180488,0.24324, 0.446, 0.10620513, 0.0720187, 8.83162683],
[0.02927458, -0.02914746, 0.03271076, 0.138, 1.63631707,0.074953, 0.138, 0.02410256, 0.02927458, 1.65473267],
[0.02124579, -0.00660226, 0.05683001, 0.218, 0.16541463,0.13718, 0.241, 0.04171795, 0.02124579, 0.07039812],
[2.71490067e-40, -2.71490067e-40, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.26462710e-40, 6.01460518e-42],
[0.0147575, -0.0147575, 0.016589, 0.06, 0.03368293,0.042426, 0.06, 0.00917949, 0.01001548, 0.03172037],
[0.05563514, -0.05003055, 0.0377493, 0.474, 3.34209756,0.10819, 0.474, 0.04441026, 0.05563514, 3.34310614],
[0.01066405, -0.01066405, 0.00912966, 0.03, 0.52358537,0.026163, 0.042, 0.00769231, 0.00753237, 0.52319376],
[0.02506229, -0.01686662, 0.06327203, 0.407, 0.10931707,0.1492, 0.407, 0.04461538, 0.02506229, 0.13188551],
[0.05540528, -0.05540528, 0.05916798, 0.36, 5.93456098,0.18526, 0.36, 0.14474359, 0.03879351, 5.91696978],
[0.05114493, -0.04906722, 0.03169166, 0.444, 1.59946341,0.089803, 0.444, 0.051, 0.05114493, 1.39984371],
[1.87455177e-04, -8.82323678e-05, 3.60629601e-02, 0.00000000e+00, 0.00000000e+00, 7.63680000e-02, 0.00000000e+00, 0.00000000e+00, 1.87455177e-04,-1.21670239e-05],
[0.00573919, -0.00573919, 0.00418135, 0.04, 0.0444878,0.013435, 0.04, 0.00984615, 0.00463094, 0.03577326],
[0.02921642, -0.02921642, 0.06455675, 0.145, 1.28114634,0.15486, 0.145, 0.0314359, 0.00907076, 1.30595106],
[0.08101569, -0.05496349, 0.03591274, 0.557, 5.79265854,0.1294, 0.557, 0.10274359, 0.08101569, 5.7521422 ],
[5.37317466e-07, -2.56175141e-07, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 5.37317466e-07, 1.03127202e-08],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[7.79437718e-04, -7.79437718e-04, 1.24131975e-02, 0.00000000e+00, 0.00000000e+00, 3.04060000e-02, 0.00000000e+00, 0.00000000e+00, 3.70210952e-04, 3.27917545e-05],
[7.19856850e-05, -7.19856850e-05, 2.88676449e-04, 1.00000000e-03, 2.43902439e-05, 7.07110000e-04, 2.00000000e-03, 1.02564103e-04, 4.31188798e-05, 2.60691650e-05],
[0.00866364, -0.00866364, 0.01214903, 0.03, 0.46446341,0.033941, 0.048, 0.00933333, 0.00584264, 0.46299034],
[0.03683296, -0.03683296, 0.07256353, 0.2, 0.89,0.18385, 0.353, 0.0225641, 0.02517257, 0.88537216],
[0.03602508, -0.03342342, 0.05528397, 0.172, 0.12058537,0.14991, 0.173, 0.05969231, 0.03602508, 0.10686173],
[0.02253416, -0.02253416, 0.01985341, 0.112, 5.89978049,0.084146, 0.12, 0.04453846, 0.01958278, 5.88525247],
[0.00622806, -0.00622806, 0.00926554, 0.015, 0.01317073,0.026163, 0.036, 0.00538462, 0.00320484, 0.00829287],
[0.01379371, -0.01379371, 0.00563317, 0.02, 1.16509756,0.020506, 0.043, 0.01051282, 0.0134218, 1.16338281],
[0.01159407, -0.01159407, 0.01052999, 0.058, 1.19278049,0.028991, 0.058, 0.00697436, 0.0092909, 1.19462633],
[0.01666124, -0.01508531, 0.01591668, 0.15, 4.25739024,0.057276, 0.15, 0.02835897, 0.01666124, 4.23504838],
[0.02791508, -0.02791508, 0.00861704, 0.059, 2.72943902,0.031113, 0.059, 0.01371795, 0.02720056, 2.66137841],
[0.02677122, -0.02677122, 0.00984899, 0.043, 4.52929268,0.051619, 0.074, 0.02512821, 0.02513023, 4.52779804],
[0.00820856, -0.00820856, 0.04182701, 0.229, 0.85280488,0.12374, 0.229, 0.02958974, 0.00626605, 0.8738938 ],
[0.00660789, -0.00533542, 0.00713786, 0.049, 0.06365854,0.027577, 0.049, 0.01835897, 0.00660789, 0.05446593],
[0.00553774, -0.00553774, 0.00491419, 0.026, 3.13085366,0.014849, 0.026, 0.00651282, 0.0033944, 3.12413464],
[7.01018256e-03, -7.01018256e-03, 2.29380976e-02, 1.26000000e-01, 6.56448780e+00, 4.45480000e-02, 1.26000000e-01, 1.09743590e-02, 5.16783812e-03, 6.55718858e+00],
[0.01903315, -0.01903315, 0.02305625, 0.102, 4.03021951,0.073539, 0.127, 0.04748718, 0.01826975, 4.03932544],
[0.010121, -0.00916936, 0.01151868, 0.072, 0.54807317,0.037477, 0.072, 0.02423077, 0.010121, 0.56664269],
[0.02477958, -0.02477958, 0.00811683, 0.072, 2.91585366,0.033941, 0.106, 0.01771795, 0.02394629, 2.93431172],
[0.01211938, -0.01211938, 0.01493102, 0.059, 1.22153659,0.038891, 0.059, 0.0164359, 0.00614063, 1.21761274],
[2.29049895e-02, -2.29049895e-02, 6.47077661e-03, 3.14000000e-01, 1.10041220e+01, 1.23740000e-01, 3.27000000e-01, 1.18076923e-01, 2.12905216e-02, 1.09333661e+01],
[0.01060994, -0.01060994, 0.01007483, 0.034, 3.8605122,0.024042, 0.034, 0.01323077, 0.00897269, 3.85004794],
[0.00627491, -0.00627491, 0.00093988, 0.022, 0.77860976,0.0056569, 0.022, 0.00546154, 0.0034974, 0.77367652],
[0.05803024, -0.0551299, 0.10802924, 0.289,15.55358537, 0.20435, 0.289, 0.05861538, 0.05803024, 15.56158018],
[0.01871651, -0.01186601, 0.0663762, 0.171,11.80826829, 0.13011, 0.171, 0.02697436, 0.01871651, 11.8298579 ],
[0.05996643, -0.05996643, 0.01671762, 0.244, 0.87841463,0.042426, 0.244, 0.02264103, 0.03965289, 0.87488872],
[4.98711743e-02, -4.98711743e-02, 3.65041596e-03, 5.28000000e-01, 1.48976098e+01, 1.90920000e-01, 5.28000000e-01, 1.90358974e-01, 3.84779140e-02, 1.48525928e+01],
[0.01136134, -0.01136134, 0.00951859, 0.069, 3.72480488,0.043841, 0.132, 0.02179487, 0.00932468, 3.66845001],
[0.03194417, -0.03194417, 0.00795281, 0.026, 1.34785366,0.028284, 0.028, 0.00864103, 0.03178111, 1.33144448],
[2.64572119e-02, -2.21160394e-02, 7.74479696e-04, 7.50000000e-02, 4.26239024e+00, 2.82840000e-02, 7.50000000e-02, 2.74358974e-02, 2.64572119e-02, 4.25156996e+00],
[0.02750577, -0.0258611, 0.04753049, 0.27, 9.51297561,0.092631, 0.27, 0.06833333, 0.02750577, 9.52726146],
[0.00866709, -0.00541625, 0.06576809, 0.186, 0.70678049,0.15415, 0.186, 0.01346154, 0.00866709, 0.70474759],
[0.03066446, -0.02713743, 0.02467478, 0.232, 4.49546341,0.097581, 0.35, 0.08861538, 0.03066446, 4.52324303],
[0.03264028, -0.03264028, 0.06438005, 0.102, 0.34746341,0.15415, 0.2, 0.01420513, 0.02531656, 0.31306559],
[0.10027745, -0.10027745, 0.24027297, 2.921,19.25053659, 0.87257, 2.921, 0.42974359, 0.095849, 19.15442972],
[0.00700114, -0.00699188, 0.00745196, 0.051, 0.07097561,0.028991, 0.056, 0.01948718, 0.00700114, 0.07004534],
[0.02156914, -0.02156914, 0.00709096, 0.037, 2.22063415,0.028284, 0.055, 0.01587179, 0.01496926, 2.2271266 ],
[0.04052342, -0.04052342, 0.01554335, 0.088, 4.43229268,0.060104, 0.088, 0.02589744, 0.02803208, 4.44932817],
[0.03391535, -0.03391535, 0.03559771, 0.18, 0.57178049,0.13576, 0.236, 0.08284615, 0.0298453, 0.61786751],
[0.01311557, -0.01311557, 0.01651455, 0.096, 5.56714634,0.034648, 0.098, 0.02623077, 0.01097477, 5.57445898],
[0.02984218, -0.02984218, 0.04015245, 0.156, 0.49970732,0.11384, 0.162, 0.05010256, 0.02111739, 0.50149962],
[0.00525823, -0.00525823, 0.0044271, 0.017, 1.22795122,0.014849, 0.024, 0.00615385, 0.00431849, 1.2262395 ],
[0.01034227, -0.0064927, 0.00905154, 0.054, 3.7145122,0.022627, 0.059, 0.01389744, 0.01034227, 3.71109431],
[0.01480054, -0.01480054, 0.01288003, 0.1, 4.24504878,0.04879, 0.1, 0.03451282, 0.01429233, 4.26818139],
[0.02487982, -0.02487982, 0.04157097, 0.238, 9.47917073,0.084146, 0.238, 0.06812821, 0.02178416, 9.4506215 ],
[0.0519567, -0.0519567, 0.03125581, 0.244, 3.01041463,0.10394, 0.244, 0.04551282, 0.03842769, 3.00134531],
[0.11461199, -0.11461199, 0.18484777, 1.671,15.49843902, 0.63498, 1.671, 0.38930769, 0.10481226, 15.48006191],
[0.07299834, -0.06491022, 0.18753433, 0.99,21.14187805, 0.35002, 0.99, 0.19264103, 0.07299834, 21.11450374],
[0.00637064, -0.00336012, 0.05694093, 0.131, 1.2014878,0.13647, 0.175, 0.01325641, 0.00637064, 1.18082784],
[3.20491472e-02, -3.20491472e-02, 5.58137707e-03, 3.42000000e-01, 1.17296585e+01, 1.27280000e-01, 3.52000000e-01, 8.53589744e-02, 2.53251481e-02, 1.17137294e+01],
[0.02781479, -0.02778159, 0.02039015, 0.06, 4.47456098,0.060811, 0.06, 0.02194872, 0.02781479, 4.48594928],
[2.46837687e-02, -2.46837687e-02, 5.41311894e-03, 5.08000000e-01, 1.48384390e+01, 1.92330000e-01, 5.44000000e-01, 1.59897436e-01, 2.11520468e-02, 1.48894277e+01],
[0.00868918, -0.00474268, 0.05575972, 0.069, 0.05356098,0.13576, 0.069, 0.00579487, 0.00868918, 0.02191644],
[0.04526781, -0.04408048, 0.07762039, 0.253,13.55292683, 0.16405, 0.461, 0.10697436, 0.04526781, 13.52730947],
[0.05879119, -0.05879119, 0.13295615, 0.455,18.92917073, 0.30759, 0.83, 0.13789744, 0.04592195, 18.93763555],
[0.02189384, -0.01216995, 0.05066665, 0.246, 9.98273171,0.098995, 0.28, 0.06220513, 0.02189384, 9.99516767],
[0.02539376, -0.02539376, 0.01551926, 0.07, 4.18487805,0.048083, 0.07, 0.01479487, 0.01839075, 4.18516621],
[0.03555225, -0.02731574, 0.02814509, 0.204, 3.88414634,0.099702, 0.235, 0.08258974, 0.03555225, 3.93872613],
[0.0582598, -0.04036336, 0.04616096, 0.319,11.37339024, 0.11455, 0.337, 0.06123077, 0.0582598, 11.34920676],
[0.01981077, -0.01981077, 0.03330417, 0.106, 8.0285122,0.062933, 0.188, 0.03092308, 0.01455206, 8.03116754],
[0.05959746, -0.04430587, 0, 0.99,21.09304878, 0.35002, 0.99, 0.43153846, 0.05959746, 21.13322117],
[3.21965997e-02, -3.21965997e-02, 4.40061045e-03, 2.82000000e-01, 1.03585610e+01, 1.06070000e-01, 2.82000000e-01, 1.04769231e-01, 3.08413155e-02, 1.03278009e+01],
[1.61063213e-02, -1.61063213e-02, 5.67556300e-02, 3.14000000e-01, 1.12777805e+01, 1.14550000e-01, 3.14000000e-01, 4.11025641e-02, 1.10897848e-02, 1.12605687e+01],
[0.02189384, -0.01179437, 0.05216424, 0.151,10.04721951, 0.098995, 0.151, 0.04297436, 0.02189384, 10.08818858],
[0.00238657, -0.00238657, 0.0034182, 0.014, 0.77714634,0.011314, 0.016, 0.00351282, 0.00232057, 0.77743005],
[0.06433761, -0.06083287, 0.14957506, 0.946,20.62917073, 0.33446, 0.946, 0.37597436, 0.06433761, 20.61320173],
[1.39575245e-04, -6.63125529e-05, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.39575245e-04, 8.37811140e-07],
[2.78771985e-10, -2.78771985e-10, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 2.70507545e-10, 2.56354866e-11],
[1.17302985e-02, -7.71146815e-03, 2.53259440e-02, 1.64000000e-01, 7.73348780e+00, 6.50540000e-02, 1.74000000e-01, 4.20000000e-02, 1.17302985e-02, 7.74824436e+00],
[0.01495479, -0.01495479, 0.05647272, 0.306,11.12082927, 0.11455, 0.306, 0.03379487, 0.01350048, 11.13703625],
[0.06224454, -0.06224454, 0.10018563, 0.528,14.96365854, 0.19799, 0.528, 0.08512821, 0.04174687, 14.9855656 ],
[1.50784174e-02, -1.50784174e-02, 5.75469303e-02, 3.14000000e-01, 1.12202439e+01, 1.11020000e-01, 3.14000000e-01, 8.52051282e-02, 8.68754010e-03, 1.12353267e+01],
[0.00507143, -0.00507143, 0.00379475, 0.017, 0.02482927,0.0098995, 0.023, 0.00358974, 0.00308139, 0.023151 ],
[0.02900558, -0.02900558, 0.09336675, 0.528,14.83170732, 0.18668, 0.528, 0.08441026, 0.0267984, 14.78693574],
[0.01743156, -0.01634197, 0.06142791, 0.354,11.68380488, 0.13011, 0.355, 0.09658974, 0.01743156, 11.6854788 ],
[0.06157537, -0.04061912, 0.09114882, 0.434, 8.39795122,0.31396, 0.878, 0.17061538, 0.06157537, 8.3171658 ],
[0.01157586, -0.00787553, 0.00781547, 0.103, 3.28846341,0.034648, 0.103, 0.02582051, 0.01157586, 3.29875571],
[7.33132616e-03, -6.81397261e-03, 3.00898494e-02, 1.88000000e-01, 7.75687805e+00, 6.64680000e-02, 1.88000000e-01, 4.87179487e-02, 7.33132616e-03, 7.75743580e+00],
[0.0325806, -0.02489858, 0.06056313, 0.347, 1.96856098,0.17678, 0.347, 0.09082051, 0.0325806, 1.93503196],
[0.02057271, -0.02057271, 0.03637653, 0.168, 7.91641463,0.070004, 0.168, 0.03194872, 0.0175232, 7.87590288],
[0.02488661, -0.02177646, 0.01564241, 0.205, 0.40578049,0.087681, 0.205, 0.05994872, 0.02488661, 0.41578688],
[0.07366755, -0.07366755, 0.13654436, 0.99,20.81831707, 0.35002, 0.99, 0.11958974, 0.04818856, 20.83153348],
[0.02708256, -0.02708256, 0.08111137, 0.224,13.16290244, 0.15839, 0.42, 0.09841026, 0.02505871, 13.18839945],
[0.00958152, -0.00958152, 0.02217553, 0.12, 6.09365854,0.045255, 0.12, 0.02551282, 0.00690841, 6.10985475],
[0.02958837, -0.01655602, 0.0402901, 0.206, 4.10741463,0.1393, 0.276, 0.05653846, 0.02958837, 4.02902721],
[0.07592541, -0.07592541, 0.19758112, 1.738,16.99126829, 0.50275, 1.738, 0.51812821, 0.06254254, 16.89231623],
[0.03172265, -0.0218731, 0.0643494, 0.611, 6.06187805,0.21567, 0.611, 0.11628205, 0.03172265, 6.03346158],
[0.00524933, -0.00425262, 0.00617516, 0.022, 0.88902439,0.015556, 0.022, 0.00415385, 0.00524933, 0.88950276],
[2.94223481e-02, -1.75922046e-02, 1.93346063e-03, 3.24000000e-01, 1.13452439e+01, 1.14550000e-01, 3.24000000e-01, 1.01076923e-01, 2.94223481e-02, 1.14096319e+01],
[0.04812244, -0.03838389, 0.06461018, 0.461,13.97795122, 0.1789, 0.461, 0.13925641, 0.04812244, 13.95949649],
[0.03621849, -0.03433152, 0.03043904, 0.165, 1.58258537,0.08556, 0.184, 0.03061538, 0.03621849, 1.53392771],
[0.02373659, -0.02277035, 0.05867974, 0.434,13.38863415, 0.16405, 0.434, 0.07953846, 0.02373659, 13.34239253],
[0.02111005, -0.02111005, 0.07987431, 0.224,13.04126829, 0.15839, 0.224, 0.04605128, 0.02028668, 13.05917311],
[0.03671861, -0.03671861, 0.01580834, 0.064, 3.6292439,0.060811, 0.084, 0.03615385, 0.03589838, 3.59330461],
[0.03538591, -0.03538591, 0.05734932, 0.175,11.07687805, 0.11738, 0.175, 0.04389744, 0.02528186, 11.06401646],
[0.01139907, -0.01139907, 0.0404773, 0.256, 9.43263415,0.09051, 0.256, 0.04312821, 0.01139243, 9.44721736],
[2.21139985e-02, -2.21139985e-02, 7.74479696e-04, 1.13000000e-01, 4.05441463e+00, 2.68700000e-02, 1.13000000e-01, 3.17948718e-02, 1.83677482e-02, 4.02531967e+00],
[0.03887186, -0.02634328, 0.0509491, 0.189,11.66063415, 0.13364, 0.189, 0.07851282, 0.03887186, 11.63597796],
[4.18064577e-02, -4.18064577e-02, 7.74589241e-03, 3.82000000e-01, 1.81040000e+01, 1.41420000e-02, 7.64000000e-01, 7.98974359e-02, 2.37616498e-02, 1.81240583e+01],
[0.0610875, -0.05055225, 0.17753526, 1.452,16.057, 0.49144, 1.452, 0.30223077, 0.0610875, 15.96984146],
[0.04722622, -0.02572493, 0.06540382, 0.494,14.07109756, 0.17466, 0.494, 0.16689744, 0.04722622, 14.02996765],
[0.00210856, -0.00193237, 0.01084809, 0.048, 0.28902439,0.028284, 0.053, 0.00574359, 0.00210856, 0.28286954],
[0.01251112, -0.01153534, 0.03329564, 0.094, 7.72314634,0.066468, 0.178, 0.04507692, 0.01251112, 7.71739425],
[0.07891577, -0.05286848, 0.10496868, 0.385,17.73741463, 0.2588, 0.751, 0.13687179, 0.07891577, 17.7611277 ],
[2.57691246e-03, -1.65339484e-03, 1.37839166e-03, 1.00000000e-02, 1.53980488e+00, 9.19240000e-03, 1.00000000e-02, 4.35897436e-03, 2.57691246e-03, 1.52310545e+00],
[1.65446780e-02, -1.65446780e-02, 5.91533656e-02, 1.30000000e-02, 1.14603171e+01, 1.14550000e-01, 1.30000000e-02, 1.33333333e-03, 1.26380521e-02, 1.14674074e+01],
[4.11400031e-02, -3.79141831e-02, 6.84964646e-03, 5.12000000e-01, 1.48432683e+01, 1.97990000e-01, 5.28000000e-01, 1.59692308e-01, 4.11400031e-02, 1.47993898e+01],
[0.02155868, -0.01887666, 0.07962665, 0.238,13.24478049, 0.15344, 0.238, 0.04825641, 0.02155868, 13.22587027],
[0.00599973, -0.00599973, 0.00697133, 0.034, 0.63663415,0.019799, 0.034, 0.00594872, 0.00480282, 0.61555439],
[0.04266586, -0.04266586, 0.0106888, 0.126, 3.67914634,0.041719, 0.132, 0.02676923, 0.03469959, 3.67052968],
[0.00042144, -0.00042144, 0.00109543, 0.004, 0.11331707,0.0021213, 0.006, 0.0014359, 0.00027545, 0.11329803],
[0.02663118, -0.02639755, 0.01336405, 0.07, 4.36412195,0.054447, 0.079, 0.025, 0.02663118, 4.359373 ],
[0.00730426, -0.00730426, 0.00435895, 0.072, 0.61409756,0.012728, 0.096, 0.00930769, 0.00603133, 0.60116405],
[0.01877031, -0.00838677, 0.02480503, 0.068, 2.56031707,0.077075, 0.068, 0.02294872, 0.01877031, 2.4790144 ],
[0.01901328, -0.01785507, 0.0129973, 0.155, 2.71221951,0.047376, 0.155, 0.03697436, 0.01901328, 2.68282596],
[0.00190969, -0.00179795, 0.00162791, 0.01, 0.06846341,0.0056569, 0.01, 0.00307692, 0.00190969, 0.06774886],
[0.02343739, -0.02343739, 0.06043373, 0.17,10.8854878, 0.11172, 0.17, 0.07266667, 0.01923799, 10.87016693],
[1.59897956e-02, -1.15451745e-02, 1.01170444e-02, 3.20000000e-01, 1.16454634e+01, 1.37890000e-01, 3.47000000e-01, 8.94615385e-02, 1.59897956e-02, 1.15869935e+01],
[0.01774385, -0.01578744, 0.03231943, 0.085, 2.70382927,0.12233, 0.133, 0.03161538, 0.01774385, 2.68325119],
[0.02835417, -0.02835417, 0.0188188, 0.111, 3.12563415,0.054447, 0.111, 0.02897436, 0.01213407, 3.17337638],
[2.35933619e-04, -1.84870042e-04, 2.60312573e-11, 4.00000000e-03, 8.50975610e-02, 1.41420000e-03, 4.00000000e-03, 8.20512821e-04, 2.35933619e-04, 8.50491997e-02],
[0.00684639, -0.0053288, 0.01423898, 0.121, 0.82373171,0.041012, 0.121, 0.01853846, 0.00684639, 0.82149219],
[0.00656202, -0.00298516, 0.00525832, 0.04, 0.32917073,0.013435, 0.04, 0.00435897, 0.00656202, 0.32469572],
[0.03061193, -0.01830022, 0.02278918, 0.083, 3.852,0.07566, 0.083, 0.02366667, 0.03061193, 3.82389036],
[0.03450864, -0.03206468, 0.07235446, 0.354, 4.39680488,0.20506, 0.354, 0.09258974, 0.03450864, 4.35973108],
[1.33757941e-03, -1.33757941e-03, 1.81681257e-02, 1.60000000e-02, 6.81234146e+00, 4.73760000e-02, 1.60000000e-02, 5.74358974e-03, 7.88662159e-04, 6.81237822e+00],
[0.05408959, -0.05408959, 0.07739115, 0.232,13.12702439, 0.16263, 0.46, 0.12551282, 0.02715891, 13.09196866],
[0.01414166, -0.01414166, 0, 0.214, 9.05543902,0, 0.214, 0.01707692, 0.01284695, 9.04209973],
[0.00262455, -0.00262455, 0.00092195, 0.021, 0.13329268,0.0035355, 0.021, 0.00338462, 0.00176235, 0.13419267],
[0.00766504, -0.00766504, 0.02270081, 0.065, 0.15463415,0.067882, 0.065, 0.01617949, 0.00760269, 0.15422134],
[0.01601326, -0.01601326, 0.00953142, 0.044, 4.34658537,0.029698, 0.082, 0.02784615, 0.0152717, 4.34824852],
[1.36058722e-04, -1.36058722e-04, 9.39849132e-04, 2.00000000e-03, 1.47902439e-01, 2.12130000e-03, 4.00000000e-03, 3.84615385e-04, 1.20873351e-04, 1.48269290e-01],
[0.01692731, -0.01692731, 0.00934515, 0.17, 2.76797561,0.04879, 0.17, 0.03212821, 0.00804809, 2.84223399],
[0.00021792, -0.00016886, 0.00077459, 0.002, 0.16504878,0.0014142, 0.004, 0.00020513, 0.00021792, 0.16495962],
[0.0025624, -0.0017206, 0.00242901, 0.013, 0.22082927,0.0084853, 0.015, 0.00476923, 0.0025624, 0.21293887],
[0.00825234, -0.00825234, 0.00915876, 0.068, 4.30339024,0.028284, 0.073, 0.02058974, 0.00762434, 4.32659455],
[0.03004306, -0.02691667, 0.04972124, 0.152,10.16087805, 0.10324, 0.158, 0.07189744, 0.03004306, 10.1129346 ],
[0.02155975, -0.02155975, 0.04474389, 0.129, 8.92531707,0.084146, 0.236, 0.07838462, 0.02002646, 8.88551601],
[1.14268607e-02, -9.73097720e-03, 7.75027419e-04, 6.40000000e-02, 3.79953659e+00, 2.40420000e-02, 6.40000000e-02, 1.87692308e-02, 1.14268607e-02, 3.80236594e+00],
[0.02707888, -0.02707888, 0.01880556, 0.076, 4.34395122,0.053033, 0.076, 0.02764103, 0.02459437, 4.34208839],
[0.01727205, -0.01727205, 0.01056655, 0.066, 1.30939024,0.03182, 0.066, 0.0165641, 0.01633014, 1.2967273 ],
[0.00877545, -0.00877545, 0.01277828, 0.028, 0.0914878,0.032527, 0.037, 0.00364103, 0.00682708, 0.08295897],
[0.00040731, -0.00036169, 0.00082663, 0.006, 0.13621951,0.0021213, 0.006, 0.00174359, 0.00040731, 0.13689296],
[0.00145672, -0.00145672, 0.00184393, 0.006, 0.05692683,0.0056569, 0.008, 0.00189744, 0.00103611, 0.05532006],
[0.00539013, -0.00539013, 0.01083511, 0.025, 0.60539024,0.02687, 0.052, 0.00476923, 0.00387435, 0.60237517],
[0.00576748, -0.00576748, 0.01658765, 0.082, 2.201,0.045962, 0.082, 0.01251282, 0.00554347, 2.2021353 ],
[0.00038011, -0.00035004, 0.0011832, 0.006, 0.07460976,0.0028284, 0.006, 0.00097436, 0.00038011, 0.07430141],
[0.00232859, -0.00232859, 0.00442728, 0.011, 0.04782927,0.012021, 0.012, 0.00353846, 0.00200706, 0.04406202],
[0.03710778, -0.03710778, 0.00465324, 0.076, 4.443,0.03182, 0.086, 0.02310256, 0.03407041, 4.40858239],
[1.13293306e-02, -1.13293306e-02, 3.98170020e-02, 2.08000000e-01, 8.79678049e+00, 8.06100000e-02, 2.08000000e-01, 4.19487179e-02, 8.02841512e-03, 8.78487800e+00],
[0.00241135, -0.00186458, 0.0089488, 0.059, 1.06817073,0.02192, 0.059, 0.00302564, 0.00241135, 1.06816635],
[0.01979542, -0.0079298, 0.02172778, 0.128, 2.58236585,0.062225, 0.128, 0.02112821, 0.01979542, 2.5705713 ],
[0.02311331, -0.02311331, 0.01137534, 0.062, 4.10326829,0.048083, 0.076, 0.02253846, 0.02280474, 4.08634796],
[0.00020308, -0.00020308, 0.00116188, 0.006, 0.08992683,0.0021213, 0.006, 0.00030769, 0.0001032, 0.09000535],
[0.02430024, -0.01198259, 0.05125066, 0.284,10.61465854, 0.10041, 0.284, 0.04492308, 0.02430024, 10.68180335],
[0.00456388, -0.00456388, 0.00310646, 0.037, 0.09085366,0.0098995, 0.037, 0.00694872, 0.00379939, 0.09607742],
[0.0258999, -0.02057017, 0.03178882, 0.123, 5.97997561,0.086974, 0.123, 0.02774359, 0.0258999, 5.91008859],
[0.02995729, -0.02995729, 0.02248653, 0.065, 3.89078049,0.067175, 0.072, 0.02235897, 0.02419343, 3.84855841],
[0.03168889, -0.01073342, 0.02388286, 0.091, 4.27865854,0.08061, 0.091, 0.02253846, 0.03168889, 4.27815091],
[0.00558419, -0.00558419, 0.00877511, 0.022, 1.34473171,0.030406, 0.026, 0.00953846, 0.00182773, 1.37010789],
[0.0002998, -0.0002998, 0.00057734, 0.004, 0.14253659,0.0014142, 0.004, 0.00097436, 0.00028829, 0.14237889],
[0.00219221, -0.00094528, 0.01264908, 0.057, 0.64868293,0.027577, 0.057, 0.00864103, 0.00219221, 0.63404688],
[0.00025949, -0.00025949, 0.00057734, 0.004, 0.07617073,0.0014142, 0.004, 0.00179487, 0.00015403, 0.07597272],
[3.54995192e-04, -3.54995192e-04, 5.16299928e-02, 1.00000000e-02, 2.85024390e-01, 1.28690000e-01, 1.00000000e-02, 2.41025641e-03, 2.75611646e-04, 2.85969148e-01],
[0.01468492, -0.01456906, 0.01003006, 0.072, 4.19787805,0.030406, 0.072, 0.02764103, 0.01468492, 4.21230508],
[0.02925993, -0.02564169, 0.0243944, 0.069, 3.981,0.081317, 0.069, 0.01961538, 0.02925993, 3.88386281],
[0.00033805, -0.00023428, 0.00154918, 0.005, 0.06904878,0.0035355, 0.005, 0.00117949, 0.00033805, 0.06819891],
[0.02167887, -0.01716068, 0.03000235, 0.167, 7.68443902,0.068589, 0.176, 0.05779487, 0.02167887, 7.62067289],
[0.00860053, -0.00687122, 0.00984125, 0.028, 1.9362439,0.032527, 0.028, 0.01158974, 0.00860053, 1.85928806],
[1.62829923e-02, -1.27782612e-02, 5.03488484e-03, 2.93000000e-01, 1.10292927e+01, 9.19240000e-03, 2.93000000e-01, 4.98717949e-02, 1.62829923e-02, 1.10624738e+01],
[5.11402880e-02, -5.11402880e-02, 0.00000000e+00, 4.40000000e-02, 2.07410488e+01, 1.55560000e-02, 4.40000000e-02, 1.24102564e-02, 2.17468552e-02, 2.07288498e+01],
[0.02048881, -0.01983837, 0.01785776, 0.068, 3.90980488,0.045255, 0.068, 0.02412821, 0.02048881, 3.92262256],
[0.00066236, -0.00066236, 0.00180276, 0.008, 0.15141463,0.0049497, 0.008, 0.00161538, 0.00045688, 0.15286961],
[0.00060837, -0.00053966, 0.00036514, 0.006, 0.15390244,0.0021213, 0.006, 0.00184615, 0.00060837, 0.1543969 ],
[0.00261392, -0.0025949, 0.00945165, 0.046, 1.07970732,0.026163, 0.046, 0.00725641, 0.00261392, 1.0819483 ],
[2.21525541e-02, -2.09299114e-02, 4.55533610e-03, 1.75000000e-01, 7.59114634e+00, 6.50540000e-02, 1.75000000e-01, 4.90256410e-02, 2.21525541e-02, 7.58764892e+00],
[7.56803574e-03, -5.22375992e-03, 1.39641061e-03, 3.40000000e-02, 1.45278049e+00, 8.48530000e-03, 3.90000000e-02, 8.23076923e-03, 7.56803574e-03, 1.46401853e+00],
|
||||
[0.00042417, -0.00042417, 0.00109543, 0.003, 0.16736585,0.0021213, 0.006, 0.00123077, 0.00031221, 0.16745186],
|
||||
[3.55599268e-04, -3.33958979e-04, 2.60312573e-11, 4.00000000e-03, 1.18829268e-01, 1.41420000e-03, 4.00000000e-03, 1.17948718e-03, 3.55599268e-04, 1.18657803e-01],
|
||||
[8.49163246e-03, -7.36645573e-03, 1.26472005e-03, 4.00000000e-02, 4.19334146e+00, 2.82840000e-02, 8.00000000e-02, 2.64615385e-02, 8.49163246e-03, 4.21478989e+00],
|
||||
[0.04132153, -0.04132153, 0.08233984, 0.491,13.82968293, 0.1789, 0.491, 0.09758974, 0.02614823, 13.93101321],
|
||||
[0.00138337, -0.00104703, 0.00203719, 0.007, 0.09217073,0.006364, 0.007, 0.00258974, 0.00138337, 0.07952256],
|
||||
[0.02013966, -0.02013966, 0.06794402, 0.35,11.46958537, 0.12728, 0.35, 0.09010256, 0.01988961, 11.45768721],
|
||||
[0.00205373, -0.00205373, 0.0030984, 0.015, 0.06936585,0.0084853, 0.017, 0.00394872, 0.00152146, 0.06837444],
|
||||
[0.01040894, -0.01040894, 0.00890233, 0.068, 0.26421951,0.025456, 0.068, 0.00902564, 0.00546137, 0.2587529 ],
|
||||
[8.19684883e-03, -8.19684883e-03, 2.09202670e-02, 6.30000000e-02, 6.51109756e+00, 4.45480000e-02, 6.30000000e-02, 1.66153846e-02, 5.80672162e-03, 6.50965345e+00],
|
||||
[0.00023369, -0.00023369, 0.00246646, 0.007, 0.16326829,0.006364, 0.007, 0.00079487, 0.00019522, 0.1627248 ],
|
||||
[0.00490015, -0.00322078, 0.00861112, 0.016, 1.29187805,0.025456, 0.016, 0.00689744, 0.00490015, 1.27916244],
|
||||
[0.0134886, -0.00766548, 0.0189313, 0.109, 6.21553659,0.041012, 0.109, 0.03533333, 0.0134886, 6.22416714],
|
||||
[0.02813757, -0.01747513, 0.0254467, 0.127, 6.28670732,0.084146, 0.127, 0.03102564, 0.02813757, 6.35501978],
|
||||
[0.02044022, -0.02044022, 0.03488198, 0.176, 8.10602439,0.065761, 0.176, 0.04466667, 0.01777938, 8.12759058],
|
||||
[0.01947497, -0.01946271, 0.05739752, 0.284,10.64578049, 0.10889, 0.296, 0.04564103, 0.01947497, 10.64447668],
|
||||
[0.01922614, -0.01922614, 0.02849813, 0.097, 4.29178049,0.08061, 0.097, 0.02720513, 0.01556252, 4.30987745],
|
||||
[0.00027514, -0.00027514, 0.00073029, 0.004, 0.07358537,0.0014142, 0.004, 0.00153846, 0.0002546, 0.0735262]])
    num_support_vec = [61, 97, 89]

    return {'dual_coef': dual_coef,
            'support_vec': support_vec,
            'intercept': intercept,
            'gamma': gamma,
            'num_support_vec': num_support_vec}
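

# --- Editor's illustrative sketch; not part of the original file. ---
# The dict above packages a trained sklearn SVC (dual coefficients, support
# vectors, intercepts, kernel width) so predictions can be reproduced without
# unpickling the classifier. For a *binary* RBF SVM those pieces combine as
#   f(x) = sum_i dual_coef[i] * exp(-gamma * ||sv_i - x||^2) + intercept,
# with sign(f(x)) giving the class; the multiclass model combines such values
# over one-vs-one class pairs. `rbf_decision` is a hypothetical helper name,
# not something the original module defines.
def rbf_decision(x, dual_coef, support_vec, intercept, gamma):
    # Squared Euclidean distance from x to every support vector
    sq_dists = np.sum((support_vec - x) ** 2, axis=1)
    # RBF kernel values, then the weighted sum plus the bias term
    return float(np.dot(dual_coef, np.exp(-gamma * sq_dists)) + intercept)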


if __name__ == '__main__':

    binary_test_data = np.array([
        [0., 0., 0., 0., 0., 0., 0., 0.14212846, 0.02368808, 0.0580237, 0.06960633, 0.04921911, 0.],
        [0.21073171, 0.794, 0.922, 0.14076923, 0.20974742, -0.32312654, 0.32312654, 0.88741901, 0.17300546, 0.3544437, 0.39891235, 0.5271785, 0.0076014],
        [0.04058537, 0.009, 0.008, 0.00225641, 0.03362015, -0.00420592, 0.00420592, 0.00565685, 0.00235702, 0.00193218, 0.00581951, 0.00861878, 0.00671751],
        [0.07887805, 0.0035, 0.007, 0.00179487, 0.07598638, -0.00096018, 0.00113304, 0.00494975, 0.00235702, 0.00177012, 0.00098742, 0.00128452, 0.00335876],
        [0.10126829, 0.0015, 0.004, 0.00174359, 0.09954269, -0.00034342, 0.00034342, 0.00212132, 0.00153206, 0.00028868, 0.00075829, 0.00064872, 0.00212132]
    ])
    binary_predictions = predict_binary_classifier(binary_test_data)

    import pickle
    f2 = open('/Users/sarataylor/Dev/eda-explorer-public/SVMBinary.p', 'rb')
    s2 = f2.read()
    clf = pickle.loads(s2)

    assert(len([1 for i in range(5) if binary_predictions[i] != clf.predict(binary_test_data)[i]]) == 0)

    # Test multiclass
    test_data = np.array([[3.11141105e-02, -3.11136868e-02, 2.42079822e-02, 7.49220000e-02, 1.15335178e+01, 8.37681119e-02, 7.49220000e-02, 1.03606795e-02, 3.11141105e-02, 1.15205823e+01],
                          [0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
                          [0.10255266, -0.10255266, 0.03827904, 0.328471, 6.61645195, 0.26352986, 0.406695, 0.02494941, 0.05696297, 7.64941098],
                          [0.1095642, -0.1095642, 0.08589464, 0.113983, 11.49373772, 0.26352986, 0.113983, 0.01375942, 0.03753318, 11.51816541],
                          [0.15404637, -0.08878016, 0.1020834, 0.768917, 11.40673696, 0.28606288, 0.768917, 0.08697605, 0.15404637, 11.46339086]])

    multi_predictions = predict_multiclass_classifier(test_data)

    f2 = open('/Users/sarataylor/Dev/eda-explorer-public/SVMMulticlass.p', 'rb')
    s2 = f2.read()
    clf = pickle.loads(s2)

    assert (len([1 for i in range(5) if multi_predictions[i] != clf.predict(test_data)[i]]) == 0)
@ -1,411 +0,0 @@
import pandas as pd
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import pywt
import os
import datetime

from load_files import getInputLoadFile, get_user_input
from ArtifactClassifiers import predict_binary_classifier, predict_multiclass_classifier

matplotlib.rcParams['ps.useafm'] = True
matplotlib.rcParams['pdf.use14corefonts'] = True
matplotlib.rcParams['text.usetex'] = True


def getWaveletData(data):
    '''
    This function computes the wavelet coefficients

    INPUT:
        data:           DataFrame, index is a list of timestamps at 8Hz, columns include EDA, filtered_eda

    OUTPUT:
        wave1Second:    DataFrame, index is a list of timestamps at 1Hz, columns include OneSecond_feature1, OneSecond_feature2, OneSecond_feature3
        waveHalfSecond: DataFrame, index is a list of timestamps at 2Hz, columns include HalfSecond_feature1, HalfSecond_feature2
    '''
    startTime = data.index[0]

    # Create wavelet dataframes
    oneSecond = pd.date_range(start=startTime, periods=len(data), freq='1s')
    halfSecond = pd.date_range(start=startTime, periods=len(data), freq='500L')

    # Compute wavelets
    cA_n, cD_3, cD_2, cD_1 = pywt.wavedec(data['EDA'], 'Haar', level=3)  # 3 = 1Hz, 2 = 2Hz, 1 = 4Hz

    # Wavelet 1 second window
    N = int(len(data)/8)
    coeff1 = np.max(abs(np.reshape(cD_1[0:4*N], (N, 4))), axis=1)
    coeff2 = np.max(abs(np.reshape(cD_2[0:2*N], (N, 2))), axis=1)
    coeff3 = abs(cD_3[0:N])
    wave1Second = pd.DataFrame({'OneSecond_feature1': coeff1, 'OneSecond_feature2': coeff2, 'OneSecond_feature3': coeff3})
    wave1Second.index = oneSecond[:len(wave1Second)]

    # Wavelet half second window
    N = int(np.floor((len(data)/8.0)*2))
    coeff1 = np.max(abs(np.reshape(cD_1[0:2*N], (N, 2))), axis=1)
    coeff2 = abs(cD_2[0:N])
    waveHalfSecond = pd.DataFrame({'HalfSecond_feature1': coeff1, 'HalfSecond_feature2': coeff2})
    waveHalfSecond.index = halfSecond[:len(waveHalfSecond)]

    return wave1Second, waveHalfSecond
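

def _wavelet_band_demo():
    # Editor's illustrative sketch; not part of the original file, and the
    # function name is hypothetical. It shows why level=3 at an 8 Hz sample
    # rate yields ~1 Hz detail coefficients: each wavedec level halves the
    # rate, so cD_1 covers ~2-4 Hz, cD_2 ~1-2 Hz, and cD_3 ~0.5-1 Hz.
    sig = np.random.randn(8 * 60)  # one minute of synthetic data at 8 Hz
    cA, cD3, cD2, cD1 = pywt.wavedec(sig, 'haar', level=3)
    # One cD_1 coefficient per 2 samples, and one cD_3 coefficient per 8:
    assert len(cD1) == len(sig) // 2 and len(cD3) == len(sig) // 8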


def getDerivatives(eda):
    # First derivative as a central difference, algebraically equal to
    # (eda[2:] - eda[:-2]) / 2, and second derivative as the standard
    # three-point stencil
    deriv = (eda[1:-1] + eda[2:]) / 2. - (eda[1:-1] + eda[:-2]) / 2.
    second_deriv = eda[2:] - 2*eda[1:-1] + eda[:-2]
    return deriv, second_deriv
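

def _derivative_identity_demo():
    # Editor's illustrative sketch; not part of the original file, and the
    # function name is hypothetical. It verifies on random data that the
    # expression in getDerivatives reduces to a plain central difference.
    eda = np.random.randn(100)
    deriv, _ = getDerivatives(eda)
    assert np.allclose(deriv, (eda[2:] - eda[:-2]) / 2.)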


def getDerivStats(eda):
    deriv, second_deriv = getDerivatives(eda)
    maxd = max(deriv)
    mind = min(deriv)
    maxabsd = max(abs(deriv))
    avgabsd = np.mean(abs(deriv))
    max2d = max(second_deriv)
    min2d = min(second_deriv)
    maxabs2d = max(abs(second_deriv))
    avgabs2d = np.mean(abs(second_deriv))

    return maxd, mind, maxabsd, avgabsd, max2d, min2d, maxabs2d, avgabs2d


def getStats(data):
    eda = data['EDA'].values
    filt = data['filtered_eda'].values
    maxd, mind, maxabsd, avgabsd, max2d, min2d, maxabs2d, avgabs2d = getDerivStats(eda)
    maxd_f, mind_f, maxabsd_f, avgabsd_f, max2d_f, min2d_f, maxabs2d_f, avgabs2d_f = getDerivStats(filt)
    amp = np.mean(eda)
    amp_f = np.mean(filt)
    return amp, maxd, mind, maxabsd, avgabsd, max2d, min2d, maxabs2d, avgabs2d, amp_f, maxd_f, mind_f, maxabsd_f, avgabsd_f, max2d_f, min2d_f, maxabs2d_f, avgabs2d_f


def computeWaveletFeatures(waveDF):
    maxList = waveDF.max().tolist()
    meanList = waveDF.mean().tolist()
    stdList = waveDF.std().tolist()
    medianList = waveDF.median().tolist()
    aboveZeroList = (waveDF[waveDF > 0]).count().tolist()

    return maxList, meanList, stdList, medianList, aboveZeroList


def getWavelet(wave1Second, waveHalfSecond):
    max_1, mean_1, std_1, median_1, aboveZero_1 = computeWaveletFeatures(wave1Second)
    max_H, mean_H, std_H, median_H, aboveZero_H = computeWaveletFeatures(waveHalfSecond)
    return max_1, mean_1, std_1, median_1, aboveZero_1, max_H, mean_H, std_H, median_H, aboveZero_H


def getFeatures(data, w1, wH):
    # Get DerivStats
    amp, maxd, mind, maxabsd, avgabsd, max2d, min2d, maxabs2d, avgabs2d, amp_f, maxd_f, mind_f, maxabsd_f, avgabsd_f, max2d_f, min2d_f, maxabs2d_f, avgabs2d_f = getStats(data)
    statFeat = np.hstack([amp, maxd, mind, maxabsd, avgabsd, max2d, min2d, maxabs2d, avgabs2d, amp_f, maxd_f, mind_f, maxabsd_f, avgabsd_f, max2d_f, min2d_f, maxabs2d_f, avgabs2d_f])

    # Get Wavelet Features
    max_1, mean_1, std_1, median_1, aboveZero_1, max_H, mean_H, std_H, median_H, aboveZero_H = getWavelet(w1, wH)
    waveletFeat = np.hstack([max_1, mean_1, std_1, median_1, aboveZero_1, max_H, mean_H, std_H, median_H, aboveZero_H])

    all_feat = np.hstack([statFeat, waveletFeat])

    # Note: `x in list` compares by equality/identity and never detects a
    # freshly computed NaN, so use the numpy predicates instead
    if np.isinf(all_feat).any():
        print("Inf")

    if np.isnan(all_feat).any():
        print("NaN")

    return list(all_feat)


def createFeatureDF(data):
    '''
    INPUT:
        data:       DataFrame, index is a list of timestamps at 8Hz, columns include EDA, filtered_eda

    OUTPUT:
        features:   DataFrame, index is a list of timestamps for each 5 seconds, contains all the features
    '''
    # Compute wavelet features
    wave1sec, waveHalf = getWaveletData(data)

    # Create 5 second timestamp list
    timestampList = data.index.tolist()[0::40]

    # feature names for DataFrame columns
    allFeatureNames = ['raw_amp', 'raw_maxd', 'raw_mind', 'raw_maxabsd', 'raw_avgabsd', 'raw_max2d', 'raw_min2d', 'raw_maxabs2d', 'raw_avgabs2d', 'filt_amp', 'filt_maxd', 'filt_mind',
                       'filt_maxabsd', 'filt_avgabsd', 'filt_max2d', 'filt_min2d', 'filt_maxabs2d', 'filt_avgabs2d', 'max_1s_1', 'max_1s_2', 'max_1s_3', 'mean_1s_1', 'mean_1s_2', 'mean_1s_3',
                       'std_1s_1', 'std_1s_2', 'std_1s_3', 'median_1s_1', 'median_1s_2', 'median_1s_3', 'aboveZero_1s_1', 'aboveZero_1s_2', 'aboveZero_1s_3', 'max_Hs_1', 'max_Hs_2', 'mean_Hs_1',
                       'mean_Hs_2', 'std_Hs_1', 'std_Hs_2', 'median_Hs_1', 'median_Hs_2', 'aboveZero_Hs_1', 'aboveZero_Hs_2']

    # Initialize Feature Data Frame
    features = pd.DataFrame(np.zeros((len(timestampList), len(allFeatureNames))), columns=allFeatureNames, index=timestampList)

    # Compute features for each 5 second epoch
    for i in range(len(features) - 1):
        start = features.index[i]
        end = features.index[i + 1]
        this_data = data[start:end]
        this_w1 = wave1sec[start:end]
        this_w2 = waveHalf[start:end]
        features.iloc[i] = getFeatures(this_data, this_w1, this_w2)
    return features


def classifyEpochs(features, featureNames, classifierName):
    '''
    This function takes the full features DataFrame and classifies each 5 second epoch into artifact, questionable, or clean

    INPUTS:
        features:           DataFrame, index is a list of timestamps for each 5 seconds, contains all the features
        featureNames:       list of Strings, subset of feature names needed for classification
        classifierName:     string, type of SVM (binary or multiclass)

    OUTPUTS:
        labels:             Series, index is a list of timestamps for each 5 seconds, values of -1, 0, or 1 for artifact, questionable, or clean
    '''
    # Only get relevant features
    features = features[featureNames]
    X = features[featureNames].values

    # Classify each 5 second epoch and put into DataFrame
    if 'Binary' in classifierName:
        featuresLabels = predict_binary_classifier(X)
    elif 'Multi' in classifierName:
        featuresLabels = predict_multiclass_classifier(X)

    return featuresLabels


def getSVMFeatures(key):
    '''
    This returns the list of relevant features

    INPUT:
        key:            string, either "Binary" or "Multiclass"

    OUTPUT:
        featureList:    list of Strings, subset of feature names needed for classification
    '''
    if key == "Binary":
        return ['raw_amp', 'raw_maxabsd', 'raw_max2d', 'raw_avgabs2d', 'filt_amp', 'filt_min2d', 'filt_maxabs2d', 'max_1s_1',
                'mean_1s_1', 'std_1s_1', 'std_1s_2', 'std_1s_3', 'median_1s_3']
    elif key == "Multiclass":
        return ['filt_maxabs2d', 'filt_min2d', 'std_1s_1', 'raw_max2d', 'raw_amp', 'max_1s_1', 'raw_maxabs2d', 'raw_avgabs2d',
                'filt_max2d', 'filt_amp']
    else:
        print('Error!! Invalid key, choose "Binary" or "Multiclass"\n\n')
        return


def classify(classifierList):
    '''
    This function wraps other functions in order to load, classify, and return the label for each 5 second epoch of Q sensor data.

    INPUT:
        classifierList:     list of strings, either "Binary" or "Multiclass"
    OUTPUT:
        labels:             array, each row is a 5 second epoch, each column holds -1, 0, or 1 (artifact, questionable, or clean) for one classifier
        data:               DataFrame, index is a list of timestamps at 8Hz, columns include AccelZ, AccelY, AccelX, Temp, EDA, filtered_eda
    '''
    # Constants
    oneHour = 8*60*60  # 8(samp/s)*60(s/min)*60(min/hour) = samp/hour
    fiveSec = 8*5

    # Load data
    data, _ = getInputLoadFile()

    # Get pickle List and featureNames list
    featureNameList = [[]]*len(classifierList)
    for i in range(len(classifierList)):
        featureNames = getSVMFeatures(classifierList[i])
        featureNameList[i] = featureNames

    # Get the number of data points, hours, and labels
    rows = len(data)
    num_labels = int(np.ceil(float(rows)/fiveSec))
    hours = int(np.ceil(float(rows)/oneHour))

    # Initialize labels array
    labels = -1*np.ones((num_labels, len(classifierList)))

    for h in range(hours):
        # Get a data slice that is at most 1 hour long
        start = h*oneHour
        end = min((h+1)*oneHour, rows)
        cur_data = data[start:end]

        features = createFeatureDF(cur_data)

        for i in range(len(classifierList)):
            # Get correct feature names for classifier
            classifierName = classifierList[i]
            featureNames = featureNameList[i]

            # Label each 5 second epoch (12*60 = 720 five-second epochs per hour)
            temp_labels = classifyEpochs(features, featureNames, classifierName)
            labels[(h*12*60):(h*12*60 + temp_labels.shape[0]), i] = temp_labels

    return labels, data


def plotData(data, labels, classifierList, filteredPlot=0, secondsPlot=0):
    '''
    This function plots the Q sensor EDA data with shading for artifact (red) and questionable data (grey).
    Note that questionable data will only appear if you choose a multiclass classifier

    INPUT:
        data:           DataFrame, indexed by timestamps at 8Hz, columns include EDA and filtered_eda
        labels:         array, each row is a 5 second period and each column is a different classifier
        classifierList: list of strings, the classifier name used for each column of labels
        filteredPlot:   binary, 1 for including filtered EDA in plot, 0 for only raw EDA on the plot, defaults to 0
        secondsPlot:    binary, 1 for x-axis in seconds, 0 for x-axis in minutes, defaults to 0

    OUTPUT:
        [plot] the resulting plot has N subplots (where N is the length of classifierList) that have linked x and y axes
        and have shading for artifact (red) and questionable data (grey)
    '''

    # Initialize x axis
    if secondsPlot:
        scale = 1.0
    else:
        scale = 60.0
    time_m = np.arange(0, len(data))/(8.0*scale)

    # Initialize Figure
    plt.figure(figsize=(10, 5))

    # For each classifier, label each epoch and plot
    for k in range(np.shape(labels)[1]):
        key = classifierList[k]

        # Initialize Subplots
        if k == 0:
            ax = plt.subplot(len(classifierList), 1, k+1)
        else:
            ax = plt.subplot(len(classifierList), 1, k+1, sharex=ax, sharey=ax)

        # Plot EDA
        ax.plot(time_m, data['EDA'])

        # For each epoch, shade if necessary
        for i in range(0, len(labels)-1):
            if labels[i, k] == -1:
                # artifact
                start = i*40/(8.0*scale)
                end = start + 5.0/scale
                ax.axvspan(start, end, facecolor='red', alpha=0.7, edgecolor='none')
            elif labels[i, k] == 0:
                # questionable
                start = i*40/(8.0*scale)
                end = start + 5.0/scale
                ax.axvspan(start, end, facecolor='.5', alpha=0.5, edgecolor='none')

        # Plot filtered data if requested
        if filteredPlot:
            ax.plot(time_m - .625/scale, data['filtered_eda'], c='g')
            plt.legend(['Raw SC', 'Filtered SC'], loc=0)

        # Label and title each subplot
        plt.ylabel(r'$\mu$S')
        plt.title(key)

    # Only include x axis label on final subplot
    if secondsPlot:
        plt.xlabel('Time (s)')
    else:
        plt.xlabel('Time (min)')

    # Display the plot
    plt.subplots_adjust(hspace=.3)
    plt.show()
    return


if __name__ == "__main__":
    numClassifiers = int(get_user_input('Would you like 1 classifier (Binary or Multiclass) or both (enter 1 or 2): '))

    # Create list of classifiers
    if numClassifiers == 1:
        temp_clf = int(get_user_input("Select a classifier:\n1: Binary\n2: Multiclass\n:"))
        while temp_clf != 1 and temp_clf != 2:
            # Cast the retry input to int as well; get_user_input returns a
            # string, so without the cast the comparison above never succeeds
            temp_clf = int(get_user_input("Something went wrong. Enter the number 1 or 2.\n Select a classifier:\n1: Binary\n2: Multiclass):"))
        if temp_clf == 1:
            print('Binary Classifier selected')
            classifierList = ['Binary']
        elif temp_clf == 2:
            print('Multiclass Classifier selected')
            classifierList = ['Multiclass']
    else:
        classifierList = ['Binary', 'Multiclass']

    # Classify the data
    labels, data = classify(classifierList)

    # Plotting the data
    plotDataInput = get_user_input('Do you want to plot the labels? (y/n): ')

    if plotDataInput == 'y':
        # Include filter plot?
        filteredPlot = get_user_input('Would you like to include filtered data in your plot? (y/n): ')
        if filteredPlot == 'y':
            filteredPlot = 1
        else:
            filteredPlot = 0

        # X axis in seconds?
        secondsPlot = get_user_input('Would you like the x-axis to be in seconds or minutes? (sec/min): ')
        if secondsPlot == 'sec':
            secondsPlot = 1
        else:
            secondsPlot = 0

        # Plot Data
        plotData(data, labels, classifierList, filteredPlot, secondsPlot)

        print("Remember! Red is for epochs with artifact, grey is for epochs that are questionable, and no shading is for clean epochs")

    # Saving the data
    saveDataInput = get_user_input('Do you want to save the labels? (y/n): ')

    if saveDataInput == 'y':
        outputPath = get_user_input('Output directory: ')
        outputLabelFilename = get_user_input('Output filename: ')

        # Save labels
        fullOutputPath = os.path.join(outputPath, outputLabelFilename)
        if fullOutputPath[-4:] != '.csv':
            fullOutputPath = fullOutputPath + '.csv'

        featureLabels = pd.DataFrame(labels, index=pd.date_range(start=data.index[0], periods=len(labels), freq='5s'),
                                     columns=classifierList)

        featureLabels.reset_index(inplace=True)
        featureLabels.rename(columns={'index': 'StartTime'}, inplace=True)
        featureLabels['EndTime'] = featureLabels['StartTime'] + datetime.timedelta(seconds=5)
        featureLabels.index.name = 'EpochNum'

        cols = ['StartTime', 'EndTime']
        cols.extend(classifierList)

        featureLabels = featureLabels[cols]
        featureLabels.rename(columns={'Binary': 'BinaryLabels', 'Multiclass': 'MulticlassLabels'},
                             inplace=True)

        featureLabels.to_csv(fullOutputPath)

        print("Labels saved to " + fullOutputPath)
        print("Remember! The first column is timestamps and the second column is the labels (-1 for artifact, 0 for questionable, 1 for clean)")

    print('--------------------------------')
    print("Please also cite this project:")
    print("Taylor, S., Jaques, N., Chen, W., Fedor, S., Sano, A., & Picard, R. Automatic identification of artifacts in electrodermal activity data. In Engineering in Medicine and Biology Conference. 2015")
    print('--------------------------------')
@ -1,250 +0,0 @@
import pandas as pd
import numpy as np
import os
import matplotlib.pyplot as plt
import pprint

from .load_files import getInputLoadFile, get_user_input, getOutputPath

SAMPLE_RATE = 8


def findPeaks(data, offset, start_WT, end_WT, thres=0, sampleRate=SAMPLE_RATE):
    '''
    This function finds the peaks of an EDA signal and returns basic properties.
    Also, peak_end is assumed to be no later than the start of the next peak. (Is this okay??)

    ********* INPUTS **********
    data:        DataFrame with EDA as one of the columns and indexed by a datetimeIndex
    offset:      the number of rising samples and falling samples after a peak needed to be counted as a peak
    start_WT:    maximum number of seconds before the apex of a peak that is the "start" of the peak
    end_WT:      maximum number of seconds after the apex of a peak that is the "rec.t/2" of the peak, 50% of amp
    thres:       the minimum uS change required to register as a peak, defaults as 0 (i.e. all peaks count)
    sampleRate:  number of samples per second, default=8

    ********* OUTPUTS **********
    peaks:             list of binary, 1 if apex of SCR
    peak_start:        list of binary, 1 if start of SCR
    peak_start_times:  list of strings, if this index is the apex of an SCR, it contains datetime of start of peak
    peak_end:          list of binary, 1 if rec.t/2 of SCR
    peak_end_times:    list of strings, if this index is the apex of an SCR, it contains datetime of rec.t/2
    amplitude:         list of floats, value of EDA at apex - value of EDA at start
    max_deriv:         list of floats, max derivative within 1 second of apex of SCR
    '''
    EDA_deriv = data['filtered_eda'][1:].values - data['filtered_eda'][:-1].values
    peaks = np.zeros(len(EDA_deriv))
    peak_sign = np.sign(EDA_deriv)
    for i in range(int(offset), int(len(EDA_deriv) - offset)):
        if peak_sign[i] == 1 and peak_sign[i + 1] < 1:
            peaks[i] = 1
            for j in range(1, int(offset)):
                if peak_sign[i - j] < 1 or peak_sign[i + j] > -1:
                    # if peak_sign[i-j]==-1 or peak_sign[i+j]==1:
                    peaks[i] = 0
                    break

    # Finding start of peaks
    peak_start = np.zeros(len(EDA_deriv))
    peak_start_times = [''] * len(data)
    max_deriv = np.zeros(len(data))
    rise_time = np.zeros(len(data))

    for i in range(0, len(peaks)):
        if peaks[i] == 1:
            temp_start = max(0, i - sampleRate)
            max_deriv[i] = max(EDA_deriv[temp_start:i])
            start_deriv = .01 * max_deriv[i]

            found = False
            find_start = i
            # has to peak within start_WT seconds
            while found == False and find_start > (i - start_WT * sampleRate):
                if EDA_deriv[find_start] < start_deriv:
                    found = True
                    peak_start[find_start] = 1
                    peak_start_times[i] = data.index[find_start]
                    rise_time[i] = get_seconds_and_microseconds(data.index[i] - pd.to_datetime(peak_start_times[i]))

                find_start = find_start - 1

            # If we didn't find a start
            if found == False:
                # Cast to int so the window arithmetic can be used as an index
                # even when start_WT comes in as a float
                peak_start[int(i - start_WT * sampleRate)] = 1
                peak_start_times[i] = data.index[int(i - start_WT * sampleRate)]
                rise_time[i] = start_WT

            # Check if amplitude is too small
            if thres > 0 and (data['EDA'].iloc[i] - data['EDA'][peak_start_times[i]]) < thres:
                peaks[i] = 0
                peak_start[i] = 0
                peak_start_times[i] = ''
                max_deriv[i] = 0
                rise_time[i] = 0

    # Finding the end of the peak, amplitude of peak
    peak_end = np.zeros(len(data))
    peak_end_times = [''] * len(data)
    amplitude = np.zeros(len(data))
    decay_time = np.zeros(len(data))
    half_rise = [''] * len(data)
    SCR_width = np.zeros(len(data))

    for i in range(0, len(peaks)):
        if peaks[i] == 1:
            peak_amp = data['EDA'].iloc[i]
            start_amp = data['EDA'][peak_start_times[i]]
            amplitude[i] = peak_amp - start_amp

            half_amp = amplitude[i] * .5 + start_amp

            found = False
            find_end = i
            # has to decay within end_WT seconds
            while found == False and find_end < (i + end_WT * sampleRate) and find_end < len(peaks):
                if data['EDA'].iloc[find_end] < half_amp:
                    found = True
                    peak_end[find_end] = 1
                    peak_end_times[i] = data.index[find_end]
                    decay_time[i] = get_seconds_and_microseconds(pd.to_datetime(peak_end_times[i]) - data.index[i])

                    # Find width
                    find_rise = i
                    found_rise = False
                    while found_rise == False:
                        if data['EDA'].iloc[find_rise] < half_amp:
                            found_rise = True
                            half_rise[i] = data.index[find_rise]
                            SCR_width[i] = get_seconds_and_microseconds(pd.to_datetime(peak_end_times[i]) - data.index[find_rise])
                        find_rise = find_rise - 1

                elif peak_start[find_end] == 1:
                    found = True
                    peak_end[find_end] = 1
                    peak_end_times[i] = data.index[find_end]
                find_end = find_end + 1

            # If we didn't find an end
            if found == False:
                # Cast the window length to int so it is a valid slice bound
                min_index = np.argmin(data['EDA'].iloc[i:(i + int(end_WT * sampleRate))].tolist())
                peak_end[i + min_index] = 1
                peak_end_times[i] = data.index[i + min_index]

    peaks = np.concatenate((peaks, np.array([0])))
    peak_start = np.concatenate((peak_start, np.array([0])))
    max_deriv = max_deriv * sampleRate  # now in change in amplitude over change in time form (uS/second)

    return peaks, peak_start, peak_start_times, peak_end, peak_end_times, amplitude, max_deriv, rise_time, decay_time, SCR_width, half_rise
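

def _find_peaks_demo():
    # Editor's illustrative sketch; not part of the original file, and the
    # function name is hypothetical. It builds one synthetic SCR-like bump at
    # 8 Hz and runs findPeaks with a 1-second offset and 4 s rise/decay
    # windows, mirroring how calcPeakFeatures below calls it.
    t = pd.date_range('2016-01-01', periods=8 * 30, freq='125L')  # 30 s at 8 Hz
    bump = np.exp(-0.5 * ((np.arange(len(t)) - 120) / 8.0) ** 2)  # apex ~15 s in
    df = pd.DataFrame({'EDA': bump, 'filtered_eda': bump}, index=t)
    results = findPeaks(df, 1 * SAMPLE_RATE, 4, 4, thres=.02)
    return int(results[0].sum())  # number of detected SCR apexes (1 here)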


def get_seconds_and_microseconds(pandas_time):
    return pandas_time.seconds + pandas_time.microseconds * 1e-6


def calcPeakFeatures(data, offset, thresh, start_WT, end_WT, sampleRate):
    returnedPeakData = findPeaks(data, offset*sampleRate, start_WT, end_WT, thresh, sampleRate)
    data['peaks'] = returnedPeakData[0]
    data['peak_start'] = returnedPeakData[1]
    data['peak_end'] = returnedPeakData[3]

    data['peak_start_times'] = returnedPeakData[2]
    data['peak_end_times'] = returnedPeakData[4]
    data['half_rise'] = returnedPeakData[10]
    # Note: If an SCR doesn't decrease to 50% of amplitude, then the peak_end = min(the next peak's start, 15 seconds after peak)
    data['amp'] = returnedPeakData[5]
    data['max_deriv'] = returnedPeakData[6]
    data['rise_time'] = returnedPeakData[7]
    data['decay_time'] = returnedPeakData[8]
    data['SCR_width'] = returnedPeakData[9]

    # To keep all filtered data remove this line
    # featureData = data[data.peaks==1][['EDA','rise_time','max_deriv','amp','decay_time','SCR_width']]

    # Replace 0s with NaN, this is where the 50% of the peak was not found, too close to the next peak
    # featureData[['SCR_width','decay_time']]=featureData[['SCR_width','decay_time']].replace(0, np.nan)
    # featureData['AUC']=featureData['amp']*featureData['SCR_width']
    # if outfile is not None:
    #     featureData.to_csv(outfile)

    return data


# draws a graph of the data with the peaks marked on it
# assumes that 'data' dataframe already contains the 'peaks' column
def plotPeaks(data, x_seconds, sampleRate=SAMPLE_RATE):
    if x_seconds:
        time_m = np.arange(0, len(data))/float(sampleRate)
    else:
        time_m = np.arange(0, len(data))/(sampleRate*60.)

    data_min = min(data['EDA'])
    data_max = max(data['EDA'])

    # Plot the data with the peaks marked
    plt.figure(1, figsize=(20, 5))
    peak_height = data_max * 1.15
    data['peaks_plot'] = data['peaks'] * peak_height
    plt.plot(time_m, data['peaks_plot'], '#4DBD33')
    # plt.plot(time_m, data['EDA'])
    plt.plot(time_m, data['filtered_eda'])
    plt.xlim([0, time_m[-1]])
    y_min = min(0, data_min) - (data_max - data_min) * 0.1
    plt.ylim([min(y_min, data_min), peak_height])
    plt.title('EDA with Peaks marked')
    plt.ylabel(r'$\mu$S')
    if x_seconds:
        plt.xlabel('Time (s)')
    else:
        plt.xlabel('Time (min)')

    plt.show()


def chooseValueOrDefault(str_input, default):
    if str_input == "":
        return default
    else:
        return float(str_input)


if __name__ == "__main__":

    data, filepath_confirm = getInputLoadFile()

    fullOutputPath = getOutputPath()

    print("")
    print("Please choose settings for the peak detection algorithm. For default values press return")
    thresh_str = get_user_input('\tMinimum peak amplitude (default = .02):')
    thresh = chooseValueOrDefault(thresh_str, .02)
    offset_str = get_user_input('\tOffset (default = 1): ')
    offset = chooseValueOrDefault(offset_str, 1)
    start_WT_str = get_user_input('\tMax rise time (s) (default = 4): ')
    start_WT = chooseValueOrDefault(start_WT_str, 4)
    end_WT_str = get_user_input('\tMax decay time (s) (default = 4): ')
    end_WT = chooseValueOrDefault(end_WT_str, 4)

    settings_dict = {'threshold': thresh,
                     'offset': offset,
                     'rise time': start_WT,
                     'decay time': end_WT}

    print("")
    print("Okay, finding peaks in file " + filepath_confirm + " using the following parameters")
    pprint.pprint(settings_dict)
    # Pass the arguments in the order calcPeakFeatures expects (the original
    # call passed fullOutputPath as the offset and omitted sampleRate), and
    # write the result out so the message below is accurate
    peakData = calcPeakFeatures(data, offset, thresh, start_WT, end_WT, SAMPLE_RATE)
    peakData.to_csv(fullOutputPath)
    print("Features computed and saved to " + fullOutputPath)

    # Plotting the data
    print("")
    plot_ans = get_user_input("Do you want to plot the detected peaks? (y/n): ")
    if 'y' in plot_ans:
        secs_ans = get_user_input("Would you like the x-axis to be in seconds or minutes? (sec/min): ")
        if 'sec' in secs_ans:
            x_seconds = True
        else:
            x_seconds = False
        plotPeaks(peakData, x_seconds)
    else:
        print("\tOkay, script will not produce a plot")

    print("")
    print('--------------------------------')
    print("Please also cite this project:")
    print("Taylor, S., Jaques, N., Chen, W., Fedor, S., Sano, A., & Picard, R. Automatic identification of artifacts in electrodermal activity data. In Engineering in Medicine and Biology Conference. 2015")
    print('--------------------------------')
@ -1,21 +0,0 @@
EDA-Explorer is an open-source project under the MIT License

Copyright (c) 2016 Sara Taylor and Natasha Jaques

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -1,39 +0,0 @@
eda-explorer
============

Scripts to detect artifacts and peaks in electrodermal activity (EDA) data. Note that these scripts are written for Python 2.7 and Python 3.7.

Version 1.0

Please also cite this project:
Taylor, S., Jaques, N., Chen, W., Fedor, S., Sano, A., & Picard, R. Automatic identification of artifacts in electrodermal activity data. In Engineering in Medicine and Biology Conference. 2015.

Required python packages can be found in requirements.txt

To run artifact detection from the command line:
==
python EDA-Artifact-Detection-Script.py

Currently there are only 2 classifiers to choose from: Binary or Multiclass

To run peak detection:
==
python EDA-Peak-Detection-Script.py

Descriptions of the algorithm settings can be found at http://eda-explorer.media.mit.edu/info/

To run accelerometer feature extraction:
==
python AccelerometerFeatureExtractionScript.py

This file works slightly differently than the others in that it gives summary information over periods of time.

Notes:
===

1. Currently, these files are written with the assumption that the sample rate is an integer power of 2.

2. Please visit [eda-explorer.media.mit.edu](https://eda-explorer.media.mit.edu) to use the web-based version.
@ -1 +0,0 @@
from .load_files import *
@ -1,90 +0,0 @@
from sklearn.svm import SVC
import pickle


# docs: http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html#sklearn.svm.SVC
class SVM:
    def __init__(self, C=1.0, beta=0.0, kernel='linear', poly_degree=3, max_iter=-1, tol=0.001):
        # possible kernels: 'linear', 'poly', 'rbf', 'sigmoid', 'precomputed' or a callable

        # data features
        self.n_features = None
        self.train_X = []
        self.train_Y = []
        self.val_X = []
        self.val_Y = []
        self.test_X = []
        self.test_Y = []

        # classifier features (beta is passed to sklearn as the kernel coefficient gamma)
        self.C = C
        self.beta = beta
        self.kernel = kernel
        self.poly_degree = poly_degree
        self.max_iter = max_iter
        self.tolerance = tol

        self.classifier = None

    # SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0, degree=3,
    #     gamma=0.0, kernel='rbf', max_iter=-1, probability=False,
    #     random_state=None, shrinking=True, tol=0.001, verbose=False)

    def setTrainData(self, X, Y):
        self.train_X = X
        self.train_Y = Y

        self.n_features = self.train_X.shape[1]

    def setTestData(self, X, Y):
        self.test_X = X
        self.test_Y = Y

    def setC(self, c):
        self.C = c

    def setBeta(self, beta):
        if beta is None:
            self.beta = 0.0
        else:
            self.beta = beta

    def setKernel(self, kernel, poly_degree=3):
        self.kernel = kernel
        self.poly_degree = poly_degree

    def setValData(self, X, Y):
        self.val_X = X
        self.val_Y = Y

    def train(self):
        self.classifier = SVC(C=self.C, kernel=self.kernel, gamma=self.beta, degree=self.poly_degree, max_iter=self.max_iter,
                              tol=self.tolerance)
        self.classifier.fit(self.train_X, self.train_Y)

    def predict(self, X):
        return self.classifier.predict(X)

    def getScore(self, X, Y):
        # returns accuracy
        return self.classifier.score(X, Y)

    def getNumSupportVectors(self):
        return self.classifier.n_support_

    def getHingeLoss(self, X, Y):
        preds = self.predict(X)
        hinge_inner_func = 1.0 - preds*Y
        hinge_inner_func = [max(0, x) for x in hinge_inner_func]
        return sum(hinge_inner_func)

    def saveClassifierToFile(self, filepath):
        s = pickle.dumps(self.classifier)
        f = open(filepath, 'wb')
        f.write(s)
        f.close()  # close so the pickle is fully flushed to disk

    def loadClassifierFromFile(self, filepath):
        f2 = open(filepath, 'rb')
        s2 = f2.read()
        self.classifier = pickle.loads(s2)
        f2.close()
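

# --- Editor's illustrative sketch; not part of the original file. ---
# A minimal end-to-end use of the wrapper above on toy data; the variable
# names below are hypothetical. Note that `beta` plays the role of sklearn's
# `gamma` when an RBF kernel is chosen.
if __name__ == '__main__':
    import numpy as np
    X = np.vstack([np.random.randn(20, 2) - 1, np.random.randn(20, 2) + 1])
    Y = np.hstack([-np.ones(20), np.ones(20)])
    svm = SVM(C=1.0, beta=0.5, kernel='rbf')
    svm.setTrainData(X, Y)
    svm.train()
    print('training accuracy:', svm.getScore(X, Y))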
@ -1,264 +0,0 @@
import pandas as pd
import scipy.signal as scisig
import os
import numpy as np


def get_user_input(prompt):
    try:
        return raw_input(prompt)
    except NameError:
        return input(prompt)


def getInputLoadFile():
    '''Asks user for type of file and file path. Loads corresponding data.

    OUTPUT:
        data:   DataFrame, index is a list of timestamps at 8Hz, columns include
                AccelZ, AccelY, AccelX, Temp, EDA, filtered_eda
    '''
    print("Please enter information about your EDA file... ")
    dataType = get_user_input("\tData Type (e4, q, shimmer, or misc): ")
    if dataType == 'q':
        filepath = get_user_input("\tFile path: ")
        filepath_confirm = filepath
        data = loadData_Qsensor(filepath)
    elif dataType == 'e4':
        filepath = get_user_input("\tPath to E4 directory: ")
        filepath_confirm = os.path.join(filepath, "EDA.csv")
        data = loadData_E4(filepath)
    elif dataType == 'shimmer':
        filepath = get_user_input("\tFile path: ")
        filepath_confirm = filepath
        data = loadData_shimmer(filepath)
    elif dataType == "misc":
        filepath = get_user_input("\tFile path: ")
        filepath_confirm = filepath
        data = loadData_misc(filepath)
    else:
        print("Error: not a valid file choice")

    return data, filepath_confirm


def getOutputPath():
    print("")
    print("Where would you like to save the computed output file?")
    outfile = get_user_input('\tFile name: ')
    outputPath = get_user_input('\tFile directory (./ for this directory): ')
    fullOutputPath = os.path.join(outputPath, outfile)
    if fullOutputPath[-4:] != '.csv':
        fullOutputPath = fullOutputPath + '.csv'
    return fullOutputPath


def loadData_Qsensor(filepath):
    '''
    This function loads the Q sensor data, uses a lowpass butterworth filter on the EDA signal
    Note: currently assumes sampling rate of 8hz, 16hz, 32hz; if sampling rate is 16hz or 32hz the signal is downsampled

    INPUT:
        filepath:   string, path to input file

    OUTPUT:
        data:       DataFrame, index is a list of timestamps at 8Hz, columns include AccelZ, AccelY, AccelX, Temp, EDA, filtered_eda
    '''
    # Get header info
    try:
        header_info = pd.io.parsers.read_csv(filepath, nrows=5)
    except IOError:
        print("Error!! Couldn't load file, make sure the filepath is correct and you are using a csv from the q sensor software\n\n")
        return

    # Get sample rate
    sampleRate = int((header_info.iloc[3, 0]).split(":")[1].strip())

    # Get the raw data
    data = pd.io.parsers.read_csv(filepath, skiprows=7)
    data = data.reset_index()

    # Reset the index to be a time and reset the column headers
    data.columns = ['AccelZ', 'AccelY', 'AccelX', 'Battery', 'Temp', 'EDA']

    # Get Start Time
    startTime = pd.to_datetime(header_info.iloc[4, 0][12:-10])

    # Make sure data has a sample rate of 8Hz
    data = interpolateDataTo8Hz(data, sampleRate, startTime)

    # Remove Battery Column
    data = data[['AccelZ', 'AccelY', 'AccelX', 'Temp', 'EDA']]

    # Get the filtered data using a low-pass butterworth filter (cutoff:1hz, fs:8hz, order:6)
    data['filtered_eda'] = butter_lowpass_filter(data['EDA'], 1.0, 8, 6)

    return data


def _loadSingleFile_E4(filepath, list_of_columns, expected_sample_rate, freq):
    # Load data
    data = pd.read_csv(filepath)

    # Get the startTime and sample rate
    startTime = pd.to_datetime(float(data.columns.values[0]), unit="s")
    sampleRate = float(data.iloc[0][0])
    data = data[data.index != 0]
    data.index = data.index - 1

    # Reset the data frame assuming expected_sample_rate
    data.columns = list_of_columns
    if sampleRate != expected_sample_rate:
        print('ERROR, NOT SAMPLED AT {0}HZ. PROBLEMS WILL OCCUR\n'.format(expected_sample_rate))

    # Make sure data has a sample rate of 8Hz
    data = interpolateDataTo8Hz(data, sampleRate, startTime)

    return data


def loadData_E4(filepath):
    # Load EDA data
    eda_data = _loadSingleFile_E4(os.path.join(filepath, 'EDA.csv'), ["EDA"], 4, "250L")
    # Get the filtered data using a low-pass butterworth filter (cutoff:1hz, fs:8hz, order:6)
    eda_data['filtered_eda'] = butter_lowpass_filter(eda_data['EDA'], 1.0, 8, 6)

    # Load ACC data
    acc_data = _loadSingleFile_E4(os.path.join(filepath, 'ACC.csv'), ["AccelX", "AccelY", "AccelZ"], 32, "31250U")
    # Scale the accelerometer to +-2g (raw units are 1/64 g)
    acc_data[["AccelX", "AccelY", "AccelZ"]] = acc_data[["AccelX", "AccelY", "AccelZ"]] / 64.0

    # Load Temperature data
    temperature_data = _loadSingleFile_E4(os.path.join(filepath, 'TEMP.csv'), ["Temp"], 4, "250L")

    data = eda_data.join(acc_data, how='outer')
    data = data.join(temperature_data, how='outer')

    # E4 sometimes records different length files - adjust as necessary
    min_length = min(len(acc_data), len(eda_data), len(temperature_data))

    return data[:min_length]


def loadData_shimmer(filepath):
    data = pd.read_csv(filepath, sep='\t', skiprows=(0, 1))

    orig_cols = data.columns
    rename_cols = {}

    for search, new_col in [['Timestamp', 'Timestamp'],
                            ['Accel_LN_X', 'AccelX'], ['Accel_LN_Y', 'AccelY'], ['Accel_LN_Z', 'AccelZ'],
                            ['Skin_Conductance', 'EDA']]:
        orig = [c for c in orig_cols if search in c]
        if len(orig) == 0:
            continue
        rename_cols[orig[0]] = new_col

    data.rename(columns=rename_cols, inplace=True)

    # TODO: Assuming no temperature is recorded
    data['Temp'] = 0

    # Drop the units row and unnecessary columns
    data = data[data['Timestamp'] != 'ms']
    data.index = pd.to_datetime(data['Timestamp'], unit='ms')
    data = data[['AccelZ', 'AccelY', 'AccelX', 'Temp', 'EDA']]

    for c in ['AccelZ', 'AccelY', 'AccelX', 'Temp', 'EDA']:
        data[c] = pd.to_numeric(data[c])

    # Convert to 8Hz
    data = data.resample("125L").mean()
    data.interpolate(inplace=True)

    # Get the filtered data using a low-pass butterworth filter (cutoff:1hz, fs:8hz, order:6)
    data['filtered_eda'] = butter_lowpass_filter(data['EDA'], 1.0, 8, 6)

    return data


def loadData_getColNames(data_columns):
    print("Here are the data columns of your file: ")
    print(data_columns)

    # Find the column names for each of the 5 data streams
    colnames = ['EDA data', 'Temperature data', 'Acceleration X', 'Acceleration Y', 'Acceleration Z']
    new_colnames = ['', '', '', '', '']

    for i in range(len(new_colnames)):
        new_colnames[i] = get_user_input("Column name that contains " + colnames[i] + ": ")
        while (new_colnames[i] not in data_columns):
            print("Column not found. Please try again")
            print("Here are the data columns of your file: ")
            print(data_columns)

            new_colnames[i] = get_user_input("Column name that contains " + colnames[i] + ": ")

    # Get user input on sample rate
    sampleRate = get_user_input("Enter sample rate (must be an integer power of 2): ")
    while (sampleRate.isdigit() == False) or (np.log(int(sampleRate))/np.log(2) != np.floor(np.log(int(sampleRate))/np.log(2))):
        print("Not an integer power of two")
        sampleRate = get_user_input("Enter sample rate (must be an integer power of 2): ")
    sampleRate = int(sampleRate)

    # Get user input on start time
    startTime = pd.to_datetime(get_user_input("Enter a start time (format: YYYY-MM-DD HH:MM:SS): "))
    while type(startTime) == str:
        print("Not a valid date/time")
        startTime = pd.to_datetime(get_user_input("Enter a start time (format: YYYY-MM-DD HH:MM:SS): "))

    return sampleRate, startTime, new_colnames


def loadData_misc(filepath):
    # Load data
    data = pd.read_csv(filepath)

    # Get the correct colnames
    sampleRate, startTime, new_colnames = loadData_getColNames(data.columns.values)

    data.rename(columns=dict(zip(new_colnames, ['EDA', 'Temp', 'AccelX', 'AccelY', 'AccelZ'])), inplace=True)
    data = data[['AccelZ', 'AccelY', 'AccelX', 'Temp', 'EDA']]

    # Make sure data has a sample rate of 8Hz
    data = interpolateDataTo8Hz(data, sampleRate, startTime)

    # Get the filtered data using a low-pass butterworth filter (cutoff:1hz, fs:8hz, order:6)
    data['filtered_eda'] = butter_lowpass_filter(data['EDA'], 1.0, 8, 6)

    return data


def interpolateDataTo8Hz(data, sample_rate, startTime):
    if sample_rate < 8:
        # Upsample by linear interpolation
        if sample_rate == 2:
            data.index = pd.date_range(start=startTime, periods=len(data), freq='500L')
        elif sample_rate == 4:
            data.index = pd.date_range(start=startTime, periods=len(data), freq='250L')
        data = data.resample("125L").mean()
    else:
        if sample_rate > 8:
            # Downsample
            idx_range = list(range(0, len(data)))  # TODO: double check this one
            data = data.iloc[idx_range[0::int(int(sample_rate)/8)]]
        # Set the index to be 8Hz
        data.index = pd.date_range(start=startTime, periods=len(data), freq='125L')

    # Interpolate all empty values
    data = interpolateEmptyValues(data)
    return data
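

def _interpolate_demo():
    # Editor's illustrative sketch; not part of the original file, and the
    # function name is hypothetical. Upsampling 4 Hz data to 8 Hz roughly
    # doubles the row count; the new 125 ms slots start as NaN after the
    # resample and are then filled in by interpolateEmptyValues.
    raw = pd.DataFrame({'EDA': np.arange(8, dtype=float)})  # 2 s of 4 Hz data
    out = interpolateDataTo8Hz(raw, 4, pd.Timestamp('2016-01-01'))
    return out  # 15 rows at 8 Hz, linearly interpolated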


def interpolateEmptyValues(data):
    cols = data.columns.values
    for c in cols:
        data.loc[:, c] = data[c].interpolate()

    return data


def butter_lowpass(cutoff, fs, order=5):
    # Filtering Helper functions
    sos = scisig.butter(order, cutoff, btype='low', analog=False, output='sos', fs=fs)
    return sos


def butter_lowpass_filter(data, cutoff, fs, order=5):
    # Filtering Helper functions
    sos = butter_lowpass(cutoff, fs, order=order)
    y = scisig.sosfilt(sos, data)
    return y
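

def _butter_demo():
    # Editor's illustrative sketch; not part of the original file, and the
    # function name is hypothetical. A 1 Hz low-pass at fs=8 Hz should pass a
    # slow 0.1 Hz component while strongly attenuating one near the 4 Hz
    # Nyquist limit.
    t = np.arange(0, 30, 1/8.0)  # 30 s at 8 Hz
    slow, fast = np.sin(2*np.pi*0.1*t), np.sin(2*np.pi*3.5*t)
    filtered = butter_lowpass_filter(slow + fast, 1.0, 8, 6)
    return np.std(filtered - slow)  # small: the fast component is mostly gone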
@ -1,6 +0,0 @@
numpy==1.16.2
scipy==1.2.1
pandas==0.24.1
scikit-learn==0.20.3
matplotlib>=2.1.2
PyWavelets==1.0.2
File diff suppressed because it is too large
@ -1,32 +0,0 @@
.csv files in this archive are in the following format:
The first row is the initial time of the session expressed as unix timestamp in UTC.
The second row is the sample rate expressed in Hz.

TEMP.csv
Data from temperature sensor expressed in degrees on the Celsius (°C) scale.

EDA.csv
Data from the electrodermal activity sensor expressed as microsiemens (μS).

BVP.csv
Data from photoplethysmograph.

ACC.csv
Data from 3-axis accelerometer sensor. The accelerometer is configured to measure acceleration in the range [-2g, 2g]. Therefore the unit in this file is 1/64g.
Data from x, y, and z axis are respectively in first, second, and third column.

IBI.csv
Time between individual heart beats extracted from the BVP signal.
No sample rate is needed for this file.
The first column is the time (with respect to the initial time) of the detected inter-beat interval expressed in seconds (s).
The second column is the duration in seconds (s) of the detected inter-beat interval (i.e., the distance in seconds from the previous beat).

HR.csv
Average heart rate extracted from the BVP signal. The first row is the initial time of the session expressed as unix timestamp in UTC.
The second row is the sample rate expressed in Hz.

tags.csv
Event mark times.
Each row corresponds to a physical button press on the device; the same time as the status LED is first illuminated.
The time is expressed as a unix timestamp in UTC and it is synchronized with the initial time of the session indicated in the related data files from the corresponding session.
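
A minimal sketch of reading one of these files per the layout described above (editor's illustration; the helper name and the use of pandas are assumptions, not part of the archive):

import pandas as pd

def read_e4_csv(path):
    # First row: session start as a unix timestamp (UTC); second row: sample rate in Hz
    raw = pd.read_csv(path, header=None)
    start = pd.to_datetime(float(raw.iloc[0, 0]), unit='s')
    fs = float(raw.iloc[1, 0])
    samples = raw.iloc[2:].reset_index(drop=True).astype(float)
    samples.index = pd.date_range(start=start, periods=len(samples), freq=pd.Timedelta(seconds=1.0/fs))
    return samples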
@ -1,3 +0,0 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
@ -1,16 +0,0 @@
[metadata]
name = CalculatingFeatures
version = 0.1.0
description = Library for calculating features

[options]
packages = CalculatingFeatures
install_requires =
    numpy
    pandas
    scipy
    tqdm
    matplotlib
    scikit-learn
    PeakUtils
    PyWavelets
File diff suppressed because one or more lines are too long

@ -1,8 +1,6 @@
import pandas as pd
from scipy.stats import entropy

import sys
sys.path.insert(1, '/workspaces/rapids/calculatingfeatures')
from CalculatingFeatures.helper_functions import convert1DEmpaticaToArray, convertInputInto2d, gsrFeatureNames
from CalculatingFeatures.calculate_features import calculateFeatures