witw/data/get_data.sh

56 lines
1.6 KiB
Bash
Executable File

#!/bin/bash
# ///////////////////////////////////////////////
# Whistling in the Wind
# Data Sonification with Supercollider
# Rob Canning 2023
# ///////////////////////////////////////////////
# Fail fast: abort on errors, unset variables, and mid-pipeline failures.
set -euo pipefail

# Array of weather station MAC addresses to fetch data for.
STATIONS=( 'D7:80:BE:2C:63:BD' 'F1:CC:C3:0B:7F:5C' )

# SECURITY: the API password used to be hardcoded inline below. Allow an
# environment override so the secret need not live in version control.
# TODO(review): rotate this credential and drop the hardcoded default.
PHENODE_PASSWORD="${PHENODE_PASSWORD:-jBD0CD1wGOKoTVtC}"

# Request an API token (JSON) from the OpenID Connect endpoint and parse
# out the access_token string with Python3.
# NOTE: --insecure skips TLS certificate verification; only acceptable if
# the endpoint serves a self-signed certificate — confirm before shipping.
TOKEN=$(curl --insecure --data \
  "username=data-api-user&password=${PHENODE_PASSWORD}&grant_type=password&client_id=phenode" \
  https://phenode-link.com:8443/realms/grafana/protocol/openid-connect/token \
  | python3 -c "import sys, json; print(json.load(sys.stdin)['access_token'])") \
  || { printf 'failed to obtain API token\n' >&2; exit 1; }
printf '%s\n' "$TOKEN"
# Pull down the data for each station and clean it up with sed and awk.
for data in "${STATIONS[@]}"; do
  if [[ -d "$data" ]]; then
    # A previous run left a data directory behind — start fresh.
    echo "removing old data directories"
    rm -r -- "$data"
  else
    echo "nothing to remove"
  fi

  echo "downloading data from device with MAC: $data"
  # curl --insecure \
  #   -X POST "https://phenode-link.com:2537/devices/$data/all-data/2023-04-10T16:52:21Z" \
  #   -H "Authorization: Bearer $TOKEN" \
  #   --output "$data.zip"

  # Unpack, rename the extracted all-data dir after the MAC address, and
  # remove the archive. Guard the whole chain: if the zip is missing or
  # corrupt (e.g. while the download above is commented out) we skip this
  # station instead of running the cleanup steps in the WRONG directory —
  # previously the trailing `cd ../` would then walk up out of the working
  # tree and break every later iteration.
  if ! { unzip "$data.zip" && mv all-data "$data" && rm -- "$data.zip"; }; then
    echo "failed to unpack $data.zip — skipping station $data" >&2
    continue
  fi
  cd "$data"

  # Back up the unfiltered / raw data before editing in place.
  cp sensor_data.csv sensor_data_ORIG.csv

  # Replace null (empty) CSV fields with 0: first a leading empty field,
  # then repeatedly any ",," until none remain (the :a;…;ta loop handles
  # consecutive empties). NOTE: GNU `sed -i` syntax; BSD sed needs -i ''.
  sed -i 's/^,/0,/; :a;s/,,/,0,/g;ta' sensor_data.csv

  # Keep the header row plus data rows whose second field is "0",
  # stripping the ugly bracket characters left by Growing Degree Days.
  awk -F, 'NR==1{print} NR>1 && $2=="0" {gsub(/[)(\]\[]/,"",$0);print}' \
    sensor_data.csv > tmp && mv tmp sensor_data.csv

  cd ..
  # sensor_data.csv      — final file to be used by SuperCollider
  # sensor_data_ORIG.csv — backup of the original data
done