diff --git a/.gitignore b/.gitignore
index 4638eb4bc..a24874fdb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,3 +25,6 @@ bin/__pycache__
package-lock.json
*.pyc
+
+bash-unit-test-temp
+
diff --git a/README.md b/README.md
index 3644bdfed..2da8ca234 100644
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ API_SECRET="..." NIGHTSCOUT_HOST=localhost:1337 ns-upload-entries 0 then .glucoseType = .enteredBy else . end
+ | if ._type == "Rewind" and "'$rewind_indicates_cartridge_change'" == "true" then .eventType = "Insulin Change" else . end
+ | if ._type == "Prime" and .type == "fixed" and "'$prime_indicates_pump_site_change'" == "true" then .eventType = "Site Change" else . end
+ | if ._type == "Battery" and "'$battery_indicates_battery_change'" == "true" then .eventType = "Pump Battery Change" else . end
| .eventType = if .eventType then .eventType else "Note" end
| if ._type == "AlarmSensor" and .alarm_description then .notes = .alarm_description else . end
| ( if .notes then .notes else "" end ) as $note
- | if ( .eventType == "Note" ) and ( .alarm_description | not ) then .notes = ( [ ._type, "'" $model "'", $note ] | join("") ) else . end
+ | if ( .eventType == "Note" or .eventType == "Insulin Change" or .eventType == "Site Change" or .eventType == "Pump Battery Change" ) and ( .alarm_description | not ) then .notes = ( [ ._type, "'" $model "'", $note ] | join("") ) else . end
]' \
> $OUTPUT
diff --git a/bin/nightscout.sh b/bin/nightscout.sh
index 32f9f6b51..594439159 100755
--- a/bin/nightscout.sh
+++ b/bin/nightscout.sh
@@ -237,7 +237,7 @@ ns)
| openaps use ${ZONE} select --date dateString --current now --gaps - ${FILE} | jq .
;;
latest-entries-time)
- PREVIOUS_TIME=$(ns-get host $NIGHTSCOUT_HOST entries.json 'find[type]=sgv' | jq .[0])
+ PREVIOUS_TIME=$(ns-get host $NIGHTSCOUT_HOST entries.json 'find[type][$eq]=sgv' | jq .[0])
test -z "${PREVIOUS_TIME}" && echo -n 0 || echo $PREVIOUS_TIME | jq .dateString
exit 0
;;
diff --git a/bin/ns-delete-old-devicestatus.sh b/bin/ns-delete-old-devicestatus.sh
new file mode 100755
index 000000000..d7c5c8573
--- /dev/null
+++ b/bin/ns-delete-old-devicestatus.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1)
+
+usage "$@" <<EOF
+$self --find <NIGHTSCOUT_HOST> <API_SECRET> <NUM_DAYS> - No-op version, find out what delete would do.
+$self delete <NIGHTSCOUT_HOST> <API_SECRET> <NUM_DAYS> - move entries from NIGHTSCOUT_HOST devicestatus collection to "$HOME/myopenaps/backup"
+$self nightly <NUM_DAYS> - move entries from NIGHTSCOUT_HOST devicestatus collection to "$HOME/myopenaps/backup"
+EOF
+
+function write_backup() {
+json -a -o jsony-0 >> $BACKUP_DIR/devicestatus.txt
+}
+
+export API_SECRET
+test -n "$3" && API_SECRET=$(nightscout hash-api-secret $3)
+test -n "$4" && NUM_DAYS=$4
+BACKUP_DIR="$HOME/myopenaps"/backup
+mkdir -p $BACKUP_DIR
+
+ENDPOINT=$2/api/v1/devicestatus
+
+if [ $1 = "nightly" ]; then
+ test -n "$2" && NUM_DAYS=$2
+ ENDPOINT=$NIGHTSCOUT_HOST/api/v1/devicestatus
+fi
+
+if [[ -z "$API_SECRET" || -z "$NUM_DAYS" ]] ; then
+ test -z "$API_SECRET" && echo API_SECRET undefined.
+ test -z "$NUM_DAYS" && echo NUM_DAYS undefined.
+ print_usage
+ exit 1;
+fi
+
+date_string=$(date -d "-$NUM_DAYS days" +%Y-%m-%d)
+fetch_cmd="curl --compressed -s -g $ENDPOINT.json?find\[created_at\]\[\\"\$"lte\]=$date_string\&count=100000"
+delete_cmd="curl -X DELETE -H \"API-SECRET: $API_SECRET\" -s -g $ENDPOINT.json?find\[created_at\]\[\\"\$"lte\]=$date_string\&count=100000"
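+# A sketch of what these expand to after eval (host and date here are hypothetical examples):
+#   curl --compressed -s -g https://myns.example.com/api/v1/devicestatus.json?find[created_at][$lte]=2023-01-01&count=100000
+# i.e. select (and, for delete_cmd, delete) every devicestatus document created on or before $date_string.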
+
+case "$1" in
+ --find)
+ echo $fetch_cmd
+ echo $delete_cmd
+ ;;
+ delete)
+ #echo $fetch_cmd
+ #echo $delete_cmd
+ eval $fetch_cmd | write_backup
+ eval $delete_cmd
+ ;;
+ nightly)
+ #echo $fetch_cmd
+ #echo $delete_cmd
+ eval $fetch_cmd | write_backup
+ eval $delete_cmd
+ ;;
+ *|help|--help|-h)
+ print_usage
+ exit 1;
+ ;;
+esac
diff --git a/bin/ns-status.js b/bin/ns-status.js
index 4b86c80a7..4d0543fe1 100755
--- a/bin/ns-status.js
+++ b/bin/ns-status.js
@@ -2,10 +2,12 @@
'use strict';
var os = require("os");
+var fs = require('fs');
+var moment = require("moment");
var requireUtils = require('../lib/require-utils');
-var safeRequire = requireUtils.safeRequire;
var requireWithTimestamp = requireUtils.requireWithTimestamp;
+var safeLoadFile = requireUtils.safeLoadFile;
/*
Prepare Status info to for upload to Nightscout
@@ -23,7 +25,7 @@ var requireWithTimestamp = requireUtils.requireWithTimestamp;
*/
-function mmtuneStatus (status) {
+function mmtuneStatus (status, cwd, mmtune_input) {
var mmtune = requireWithTimestamp(cwd + mmtune_input);
if (mmtune) {
if (mmtune.scanDetails && mmtune.scanDetails.length) {
@@ -35,7 +37,7 @@ function mmtuneStatus (status) {
}
}
-function preferencesStatus (status) {
+function preferencesStatus (status, cwd, preferences_input) {
var preferences = requireWithTimestamp(cwd + preferences_input);
if (preferences) {
status.preferences = preferences;
@@ -47,8 +49,8 @@ function preferencesStatus (status) {
}
}
-function uploaderStatus (status) {
- var uploader = require(cwd + uploader_input);
+function uploaderStatus (status, cwd, uploader_input) {
+ var uploader = JSON.parse(fs.readFileSync(cwd + uploader_input, 'utf8'));
if (uploader) {
if (typeof uploader === 'number') {
status.uploader = {
@@ -60,9 +62,12 @@ function uploaderStatus (status) {
}
}
-if (!module.parent) {
- var argv = require('yargs')
+
+
+var ns_status = function ns_status(argv_params) {
+
+ var argv = require('yargs')(argv_params)
.usage("$0 [--uploader uploader.json] [mmtune.json] [--preferences preferences.json]")
.option('preferences', {
alias: 'p',
@@ -77,10 +82,16 @@ if (!module.parent) {
default: false
})
.strict(true)
+ .fail(function (msg, err, yargs) {
+ if (err) {
+ return console.error('Error found', err);
+ }
+ return console.error('Parsing of command arguments failed', msg)
+ })
.help('help');
-
var params = argv.argv;
var inputs = params._;
+
var clock_input = inputs[0];
var iob_input = inputs[1];
var suggested_input = inputs[2];
@@ -94,9 +105,11 @@ if (!module.parent) {
if (inputs.length < 7 || inputs.length > 8) {
argv.showHelp();
- process.exit(1);
+ return;
}
+ // TODO: For some reason the following line does not work (../package.json is not found).
+ //var pjson = JSON.parse(fs.readFileSync('../package.json', 'utf8'));
var pjson = require('../package.json');
var cwd = process.cwd() + '/';
@@ -117,6 +130,7 @@ if (!module.parent) {
if (iobArray && iobArray.length) {
iob = iobArray[0];
iob.timestamp = iob.time;
+ iob.mills = moment(iob.time).valueOf();
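+ // moment(...).valueOf() yields epoch milliseconds, e.g. moment("2021-01-15T12:00:00Z").valueOf() === 1610712000000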
delete iob.time;
}
@@ -129,6 +143,14 @@ if (!module.parent) {
}
}
+ if (enacted && enacted.timestamp) {
+ enacted.mills = moment(enacted.timestamp).valueOf();
+ }
+
+ if (suggested && suggested.timestamp) {
+ suggested.mills = moment(suggested.timestamp).valueOf();
+ }
+
var status = {
device: 'openaps://' + os.hostname(),
openaps: {
@@ -138,27 +160,41 @@ if (!module.parent) {
version: pjson.version
},
pump: {
- clock: safeRequire(cwd + clock_input),
- battery: safeRequire(cwd + battery_input),
- reservoir: safeRequire(cwd + reservoir_input),
+ clock: safeLoadFile(cwd + clock_input),
+ battery: safeLoadFile(cwd + battery_input),
+ reservoir: safeLoadFile(cwd + reservoir_input),
status: requireWithTimestamp(cwd + status_input)
- }
+ },
+ created_at: new Date()
};
if (mmtune_input) {
- mmtuneStatus(status);
+ mmtuneStatus(status, cwd, mmtune_input);
}
if (preferences_input) {
- preferencesStatus(status);
+ preferencesStatus(status, cwd, preferences_input);
}
if (uploader_input) {
- uploaderStatus(status);
+ uploaderStatus(status, cwd, uploader_input);
}
- console.log(JSON.stringify(status));
+ return JSON.stringify(status);
} catch (e) {
return console.error("Could not parse input data: ", e);
}
}
+
+if (!module.parent) {
+ // remove the first parameter.
+ var command = process.argv;
+ command.shift();
+ command.shift();
+ var result = ns_status(command);
+ if(result !== undefined) {
+ console.log(result);
+ }
+}
+
+exports = module.exports = ns_status
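+// Rough usage from another node process (a sketch; the positional inputs are the 7-8 files named in the usage string above):
+//   var ns_status = require('./ns-status');
+//   var json = ns_status(['clock.json', /* ...remaining positional inputs... */ '--uploader', 'uploader.json']);
+//   if (json !== undefined) console.log(json);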
diff --git a/bin/ns-upload-entries.sh b/bin/ns-upload-entries.sh
index e63bf234e..43be261a0 100755
--- a/bin/ns-upload-entries.sh
+++ b/bin/ns-upload-entries.sh
@@ -28,7 +28,7 @@ fi
# requires API_SECRET and NIGHTSCOUT_HOST to be set in calling environment (i.e. in crontab)
(
-curl -m 30 -s -X POST --data-binary @${ENTRIES} \
+curl --compressed -m 30 -s -X POST --data-binary @${ENTRIES} \
${API_SECRET_HEADER} -H "content-type: application/json" \
${REST_ENDPOINT}
) && ( test -n "${OUTPUT}" && touch ${OUTPUT} ; logger "Uploaded ${ENTRIES} to ${NIGHTSCOUT_HOST}" ) || logger "Unable to upload to ${NIGHTSCOUT_HOST}"
diff --git a/bin/ns-upload.sh b/bin/ns-upload.sh
index b70adb734..010292359 100755
--- a/bin/ns-upload.sh
+++ b/bin/ns-upload.sh
@@ -38,13 +38,13 @@ fi
if [[ "${API_SECRET,,}" =~ "token=" ]]; then
REST_ENDPOINT="${REST_ENDPOINT}?${API_SECRET}"
(test "$ENTRIES" != "-" && cat $ENTRIES || cat )| (
- curl -m 30 -s -X POST --data-binary @- \
+ curl --compressed -m 30 -s -X POST --data-binary @- \
-H "content-type: application/json" \
$REST_ENDPOINT
) && ( test -n "$OUTPUT" && touch $OUTPUT ; logger "Uploaded $ENTRIES to $NIGHTSCOUT_HOST" ) || ( logger "Unable to upload to $NIGHTSCOUT_HOST"; exit 2 )
else
(test "$ENTRIES" != "-" && cat $ENTRIES || cat )| (
- curl -m 30 -s -X POST --data-binary @- \
+ curl --compressed -m 30 -s -X POST --data-binary @- \
-H "API-SECRET: $API_SECRET" \
-H "content-type: application/json" \
$REST_ENDPOINT
diff --git a/bin/openaps-install.sh b/bin/openaps-install.sh
index 9d1e5a63a..0e77d85d8 100755
--- a/bin/openaps-install.sh
+++ b/bin/openaps-install.sh
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -e
-BRANCH=${1:-master}
+BRANCH=${1:-dev}
read -p "Enter your rig's new hostname (this will be your rig's "name" in the future, so make sure to write it down): " -r
myrighostname=$REPLY
echo $myrighostname > /etc/hostname
@@ -42,21 +42,16 @@ if cat /etc/os-release | grep 'PRETTY_NAME="Debian GNU/Linux 8 (jessie)"' &> /de
echo "Acquire::Check-Valid-Until false;" | tee -a /etc/apt/apt.conf.d/10-nocheckvalid
# Replace apt sources.list with archive.debian.org locations
echo -e "deb http://security.debian.org/ jessie/updates main\n#deb-src http://security.debian.org/ jessie/updates main\n\ndeb http://archive.debian.org/debian/ jessie-backports main\n#deb-src http://archive.debian.org/debian/ jessie-backports main\n\ndeb http://archive.debian.org/debian/ jessie main contrib non-free\n#deb-src http://archive.debian.org/debian/ jessie main contrib non-free" > /etc/apt/sources.list
+ echo "Please consider upgrading your rig to Jubilinux 0.3.0 (Debian Stretch)!"
+ echo "Jubilinux 0.2.0, based on Debian Jessie, is no longer receiving security or software updates!"
fi
-#Workaround for Jubilinux to install nodejs/npm from nodesource
-if getent passwd edison &> /dev/null; then
- #Use nodesource setup script to add nodesource repository to sources.list.d
- curl -sL https://deb.nodesource.com/setup_8.x | bash -
-fi
-
-#dpkg -P nodejs nodejs-dev
# TODO: remove the `-o Acquire::ForceIPv4=true` once Debian's mirrors work reliably over IPv6
apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true -y dist-upgrade && apt-get -o Acquire::ForceIPv4=true -y autoremove
-apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true install -y sudo strace tcpdump screen acpid vim python-pip locate ntpdate ntp
+apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true install -y sudo strace tcpdump screen acpid vim locate ntpdate ntp
#check if edison user exists before trying to add it to groups
-grep "PermitRootLogin yes" /etc/ssh/sshd_config || echo "PermitRootLogin yes" > /etc/ssh/sshd_config
+grep "PermitRootLogin yes" /etc/ssh/sshd_config || echo "PermitRootLogin yes" >>/etc/ssh/sshd_config
if getent passwd edison > /dev/null; then
echo "Adding edison to sudo users"
@@ -71,7 +66,7 @@ sed -i "s/daily/hourly/g" /etc/logrotate.conf
sed -i "s/#compress/compress/g" /etc/logrotate.conf
curl -s https://raw.githubusercontent.com/openaps/oref0/$BRANCH/bin/openaps-packages.sh | bash -
-mkdir -p ~/src; cd ~/src && ls -d oref0 && (cd oref0 && git checkout $BRANCH && git pull) || git clone git://github.com/openaps/oref0.git
+mkdir -p ~/src; cd ~/src && ls -d oref0 && (cd oref0 && git checkout $BRANCH && git pull) || git clone https://github.com/openaps/oref0.git -b $BRANCH
echo "Press Enter to run oref0-setup with the current release ($BRANCH branch) of oref0,"
read -p "or press ctrl-c to cancel. " -r
cd && ~/src/oref0/bin/oref0-setup.sh
diff --git a/bin/openaps-packages.sh b/bin/openaps-packages.sh
index 47046e955..0fdb15242 100755
--- a/bin/openaps-packages.sh
+++ b/bin/openaps-packages.sh
@@ -10,32 +10,48 @@ echo 'Acquire::ForceIPv4 "true";' | sudo tee /etc/apt/apt.conf.d/99force-ipv4
apt-get install -y sudo
sudo apt-get update && sudo apt-get -y upgrade
-sudo apt-get install -y git python python-dev software-properties-common python-numpy python-pip watchdog strace tcpdump screen acpid vim locate lm-sensors || die "Couldn't install packages"
+## Debian Bullseye (Raspberry Pi OS 64bit, etc) is python3 by default and does not support python2-pip.
+if ! cat /etc/os-release | grep bullseye >& /dev/null; then
+ sudo apt-get install -y git python python-dev software-properties-common python-numpy python-pip watchdog strace tcpdump screen acpid vim locate lm-sensors || die "Couldn't install packages"
+else
+ # Bullseye based OS. Get PIP2 from pypa and pip-install python packages rather than using the py3 ones from apt
+ # Also, install python-is-python2, to override the distro default of linking python to python3
+ sudo apt-get install -y git python-is-python2 python-dev-is-python2 software-properties-common watchdog strace tcpdump screen acpid vim locate lm-sensors || die "Couldn't install packages"
+ curl https://bootstrap.pypa.io/pip/2.7/get-pip.py | python2 || die "Couldn't install pip"
+ python2 -m pip install numpy || die "Couldn't pip install numpy"
+fi
-# We require jq >= 1.5 for --slurpfile for merging preferences. Debian Jessie ships with 1.4
+# We require jq >= 1.5 for --slurpfile for merging preferences. Debian Jessie ships with 1.4.
if cat /etc/os-release | grep 'PRETTY_NAME="Debian GNU/Linux 8 (jessie)"' &> /dev/null; then
+ echo "Please consider upgrading your rig to Jubilinux 0.3.0 (Debian Stretch)!"
sudo apt-get -y -t jessie-backports install jq || die "Couldn't install jq from jessie-backports"
else
+ # Debian Stretch & Buster ship with jq >= 1.5, so install from apt
sudo apt-get -y install jq || die "Couldn't install jq"
fi
-# install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed
-if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' &> /dev/null ; then
- # nodesource doesn't support armv6
- if ! arch | grep -e 'armv6' &> /dev/null ; then
- sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8"
- sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs"
- else
- sudo apt-get install -y nodejs npm || die "Couldn't install nodejs and npm"
- npm install npm@latest -g || die "Couldn't update npm"
- fi
- ## You may also need development tools to build native addons:
- ##sudo apt-get install gcc g++ make
+# Install node using n if there is not an installed version of node >=8,<=19
+# Edge case: this is not likely to work as expected if a version of node *is* installed but falls outside the specified version constraints
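+# (The check below accepts node v8.x, v9.x, and v10.x-v19.x; any other output, including a missing node binary, triggers the install via n.)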
+if ! node --version | grep -q -e 'v[89]\.' -e 'v1[[:digit:]]\.'; then
+ echo "Installing node via n..." # For context why we don't install using apt or nvm, see https://github.com/openaps/oref0/pull/1419
+ curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n
+ # Install the latest compatible version of node
+ sudo bash n current
+ # Delete the local n binary used to bootstrap the install
+ rm n
+ # Install n globally
+ sudo npm install -g n
+
+ # Upgrade to the latest supported version of npm for the current node version
+ sudo npm upgrade -g npm || die "Couldn't update npm"
+
+ ## You may also need development tools to build native addons:
+ ## sudo apt-get install gcc g++ make
fi
# upgrade setuptools to avoid "'install_requires' must be a string" error
sudo pip install setuptools -U # no need to die if this fails
-sudo pip install -U openaps || die "Couldn't install openaps toolkit"
+sudo pip install -U --default-timeout=1000 git+https://github.com/openaps/openaps.git || die "Couldn't install openaps toolkit"
sudo pip install -U openaps-contrib || die "Couldn't install openaps-contrib"
sudo openaps-install-udev-rules || die "Couldn't run openaps-install-udev-rules"
sudo activate-global-python-argcomplete || die "Couldn't run activate-global-python-argcomplete"
diff --git a/bin/openaps-src.sh b/bin/openaps-src.sh
index a564a98ae..9fceb95b0 100755
--- a/bin/openaps-src.sh
+++ b/bin/openaps-src.sh
@@ -19,27 +19,27 @@ sudo easy_install -ZU setuptools && \
mkdir ~/src
cd ~/src && \
(
- git clone -b dev git://github.com/openaps/decocare.git || \
+ git clone -b dev https://github.com/openaps/decocare.git || \
(cd decocare && git pull)
(cd decocare && \
sudo python setup.py develop
)
- git clone git://github.com/openaps/dexcom_reader.git || \
+ git clone https://github.com/openaps/dexcom_reader.git || \
(cd dexcom_reader && git pull)
(cd dexcom_reader && \
sudo python setup.py develop
)
- git clone -b dev git://github.com/openaps/openaps.git || \
+ git clone -b dev https://github.com/openaps/openaps.git || \
(cd openaps && git pull)
(cd openaps && \
sudo python setup.py develop
)
- git clone git://github.com/openaps/openaps-contrib.git || \
+ git clone https://github.com/openaps/openaps-contrib.git || \
(cd openaps-contrib && git pull)
(cd openaps-contrib && \
sudo python setup.py develop
)
- git clone -b dev git://github.com/openaps/oref0.git || \
+ git clone -b dev https://github.com/openaps/oref0.git || \
(cd oref0 && git pull)
)
test -d oref0 && \
diff --git a/bin/oref0-autosens-history.js b/bin/oref0-autosens-history.js
index a2df748e7..94d3d0c5c 100755
--- a/bin/oref0-autosens-history.js
+++ b/bin/oref0-autosens-history.js
@@ -1,4 +1,5 @@
#!/usr/bin/env node
+'use strict';
/*
Determine Basal
@@ -16,10 +17,10 @@
*/
var basal = require('../lib/profile/basal');
-var detect = require('../lib/determine-basal/autosens');
+var detectSensitivity = require('../lib/determine-basal/autosens');
if (!module.parent) {
- var detectsensitivity = init();
+ //var detectsensitivity = init(); // removed: this variable was never used
var argv = require('yargs')
.usage("$0 [outputfile.json]")
@@ -135,16 +136,16 @@ if (!module.parent) {
var ratioArray = [];
do {
detection_inputs.deviations = 96;
- detect(detection_inputs);
+ var result = detectSensitivity(detection_inputs);
for(i=0; i/dev/null; then
- if oref0-detect-sensitivity monitor/glucose.json monitor/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json > settings/autosens.json.new && cat settings/autosens.json.new | jq .ratio | grep -q [0-9]; then
+ if oref0-detect-sensitivity monitor/glucose.json monitor/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json > settings/autosens.json.new && cat settings/autosens.json.new | jq .ratio | grep "[0-9]"; then
mv settings/autosens.json.new settings/autosens.json
echo -n "Autosens refreshed: "
else
diff --git a/bin/oref0-autotune-core.js b/bin/oref0-autotune-core.js
index 439e8ac0d..be51dae7b 100755
--- a/bin/oref0-autotune-core.js
+++ b/bin/oref0-autotune-core.js
@@ -5,7 +5,7 @@
Uses the output of oref0-autotune-prep.js
- Calculates adjustments to basal schedule, ISF, and CSF
+ Calculates adjustments to basal schedule, ISF, and CSF
Released under MIT license. See the accompanying LICENSE.txt file for
full terms and conditions
@@ -19,13 +19,17 @@
THE SOFTWARE.
*/
-
var autotune = require('../lib/autotune');
var stringify = require('json-stable-stringify');
if (!module.parent) {
var argv = require('yargs')
- .usage("$0 ")
+ .usage("$0 [--output-file=]")
+ .option('output-file', {
+ alias: 'o',
+ describe: 'File to write output',
+ default: null,
+ })
.demand(3)
.strict(true)
.help('help');
@@ -65,6 +69,10 @@ if (!module.parent) {
};
var autotune_output = autotune(inputs);
- console.log(stringify(autotune_output, { space: ' '}));
+ if (params["output-file"]) {
+ fs.writeFileSync(params["output-file"], stringify(autotune_output, {space: ' '}));
+ } else {
+ console.log(stringify(autotune_output, { space: ' '}));
+ }
}
diff --git a/bin/oref0-autotune-prep.js b/bin/oref0-autotune-prep.js
index 2ce3082d5..4c781cb5c 100755
--- a/bin/oref0-autotune-prep.js
+++ b/bin/oref0-autotune-prep.js
@@ -27,7 +27,7 @@ var moment = require('moment');
if (!module.parent) {
var argv = require('yargs')
- .usage("$0 [] [--categorize_uam_as_basal] [--tune-insulin-curve]")
+ .usage("$0 [] [--categorize_uam_as_basal] [--tune-insulin-curve] [--output-file=]")
.option('categorize_uam_as_basal', {
alias: 'u',
boolean: true,
@@ -40,6 +40,11 @@ if (!module.parent) {
describe: "Tune peak time and end time",
default: false
})
+ .option('output-file', {
+ alias: 'o',
+ describe: 'File to write output',
+ default: null,
+ })
.strict(true)
.help('help');
@@ -66,7 +71,6 @@ if (!module.parent) {
console.log('{ "error": "Could not parse input data" }');
return console.error("Could not parse input data: ", e);
}
-
var pumpprofile_data = { };
if (typeof pumpprofile_input !== 'undefined') {
try {
@@ -103,7 +107,7 @@ if (!module.parent) {
try {
var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8'));
} catch (e) {
- console.error("Warning: could not parse "+glucose_input);
+ return console.error("Warning: could not parse "+glucose_input, e);
}
var carb_data = { };
@@ -129,6 +133,10 @@ if (!module.parent) {
};
var prepped_glucose = generate(inputs);
- console.log(JSON.stringify(prepped_glucose));
+ if (params['output-file']) {
+ fs.writeFileSync(params['output-file'], JSON.stringify(prepped_glucose))
+ } else {
+ console.log(JSON.stringify(prepped_glucose));
+ }
}
diff --git a/bin/oref0-autotune.py b/bin/oref0-autotune.py
index c936641c1..a6ec91989 100755
--- a/bin/oref0-autotune.py
+++ b/bin/oref0-autotune.py
@@ -2,15 +2,15 @@
# Python version of oref0-autotune.sh
# Original bash code: scottleibrand, pietergit, beached, danamlewis
-# This script sets up an easy test environment for autotune, allowing the user to vary parameters
+# This script sets up an easy test environment for autotune, allowing the user to vary parameters
# like start/end date and number of runs.
#
-# Required Inputs:
+# Required Inputs:
# DIR, (--dir=)
# NIGHTSCOUT_HOST, (--ns-host=)
# Optional Inputs:
-# END_DATE, (--end-date=)
+# END_DATE, (--end-date=)
# if no end date supplied, assume we want a months worth or until day before current day
# NUMBER_OF_RUNS (--runs=)
# if no number of runs designated, then default to 5
@@ -25,29 +25,22 @@
import datetime
import os, errno
import logging
+import pytz
from subprocess import call
import shutil
+import six
-DIR = ''
-NIGHTSCOUT_HOST = ''
-START_DATE = datetime.datetime.today() - datetime.timedelta(days=1)
-END_DATE = datetime.datetime.today()
-NUMBER_OF_RUNS = 1
-EXPORT_EXCEL = None
-TERMINAL_LOGGING = True
-RECOMMENDS_REPORT = True
-
def get_input_arguments():
parser = argparse.ArgumentParser(description='Autotune')
-
+
# Required
# NOTE: As the code runs right now, this directory needs to exist and as well as the subfolders: autotune, settings
parser.add_argument('--dir',
'-d',
type=str,
required=True,
- help='(--dir=)')
+ help='(--dir=)')
parser.add_argument('--ns-host',
'-n',
type=str,
@@ -73,56 +66,46 @@ def get_input_arguments():
'-x',
type=str,
metavar='EXPORT_EXCEL',
- help='(--xlsx=)')
+ help='(--xlsx=)')
parser.add_argument('--log',
'-l',
- type=str,
+ type=bool,
+ default=True,
metavar='TERMINAL_LOGGING',
help='(--log )')
-
+
return parser.parse_args()
def assign_args_to_variables(args):
# TODO: Input checking.
-
- global DIR, NIGHTSCOUT_HOST, START_DATE, END_DATE, NUMBER_OF_RUNS, \
- EXPORT_EXCEL, TERMINAL_LOGGING, RECOMMENDS_REPORT
-
+
# On Unix and Windows, return the argument with an initial component of
# ~ or ~user replaced by that user's home directory.
- DIR = os.path.expanduser(args.dir)
-
- NIGHTSCOUT_HOST = args.ns_host
-
- START_DATE = args.start_date
-
- if args.end_date is not None:
- END_DATE = args.end_date
-
- if args.runs is not None:
- NUMBER_OF_RUNS = args.runs
-
- if args.xlsx is not None:
- EXPORT_EXCEL = args.xlsx
-
- if args.log is not None:
- RECOMMENDS_REPORT = args.logs
-
-def get_nightscout_profile(nightscout_host):
+ directory = os.path.expanduser(args.dir)
+ nightscout_host = args.ns_host
+ start_date = args.start_date
+ end_date = args.end_date or datetime.datetime.today()
+ number_of_runs = args.runs or 1
+ export_excel = args.xlsx
+ recommends_report = args.log
+
+ return directory, nightscout_host, start_date, end_date, number_of_runs, export_excel, recommends_report
+
+def get_nightscout_profile(nightscout_host, directory):
#TODO: Add ability to use API secret for Nightscout.
res = requests.get(nightscout_host + '/api/v1/profile.json')
- with open(os.path.join(autotune_directory, 'nightscout.profile.json'), 'w') as f: # noqa: F821
- f.write(res.text)
+ with open(os.path.join(directory, 'autotune', 'nightscout.profile.json'), 'w') as f: # noqa: F821
+ f.write(six.ensure_str(res.text, encoding='utf-8'))
def get_openaps_profile(directory):
shutil.copy(os.path.join(directory, 'settings', 'pumpprofile.json'), os.path.join(directory, 'autotune', 'profile.pump.json'))
-
+
# If a previous valid settings/autotune.json exists, use that; otherwise start from settings/profile.json
-
+
# This allows manual users to be able to run autotune by simply creating a settings/pumpprofile.json file.
# cp -up settings/pumpprofile.json settings/profile.json
shutil.copy(os.path.join(directory, 'settings', 'pumpprofile.json'), os.path.join(directory, 'settings', 'profile.json'))
-
+
# TODO: Get this to work. For now, just copy from settings/profile.json each time.
# If a previous valid settings/autotune.json exists, use that; otherwise start from settings/profile.json
# cp settings/autotune.json autotune/profile.json && cat autotune/profile.json | json | grep -q start || cp autotune/profile.pump.json autotune/profile.json
@@ -130,26 +113,34 @@ def get_openaps_profile(directory):
# print create_autotune_json
# call(create_autotune_json, shell=True)
- # cp settings/autotune.json autotune/profile.json
+ # cp settings/profile.json settings/autotune.json
shutil.copy(os.path.join(directory, 'settings', 'profile.json'), os.path.join(directory, 'settings', 'autotune.json'))
-
+
# cp settings/autotune.json autotune/profile.json
shutil.copy(os.path.join(directory, 'settings', 'autotune.json'), os.path.join(directory, 'autotune', 'profile.json'))
-
+
+ # cp settings/autotune.json autotune/pumpprofile.json
+ shutil.copy(os.path.join(directory, 'settings', 'autotune.json'), os.path.join(directory, 'autotune', 'pumpprofile.json'))
+
#TODO: Do the correct copying here.
# cat autotune/profile.json | json | grep -q start || cp autotune/profile.pump.json autotune/profile.json'])
def get_nightscout_carb_and_insulin_treatments(nightscout_host, start_date, end_date, directory):
logging.info('Grabbing NIGHTSCOUT treatments.json for date range: {0} to {1}'.format(start_date, end_date))
- # TODO: What does 'T20:00-05:00' mean?
output_file_name = os.path.join(directory, 'autotune', 'ns-treatments.json')
- start_date = start_date.strftime("%Y-%m-%d") + 'T20:00-05:00'
- end_date = end_date.strftime("%Y-%m-%d") + 'T20:00-05:00'
+
+ def _normalize_datetime(dt):
+ dt = dt.replace(hour=20, minute=0, second=0, microsecond=0, tzinfo=None)
+ dt = pytz.timezone('US/Eastern').localize(dt)
+ return dt
+
+ start_date = _normalize_datetime(start_date)
+ end_date = _normalize_datetime(end_date)
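+    # The normalization above pins both dates to 20:00 US/Eastern, e.g. (sketch):
+    #   _normalize_datetime(datetime.datetime(2021, 1, 15, 3, 12)) -> 2021-01-15 20:00:00-05:00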
url='{0}/api/v1/treatments.json?find\[created_at\]\[\$gte\]=`date --date="{1} -4 hours" -Iminutes`&find\[created_at\]\[\$lte\]=`date --date="{2} +1 days" -Iminutes`'.format(nightscout_host, start_date, end_date)
#TODO: Add ability to use API secret for Nightscout.
res = requests.get(url)
with open(output_file_name, 'w') as f:
- f.write(res.text.encode('utf-8'))
+ f.write(six.ensure_str(res.text, 'utf-8'))
def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory):
logging.info('Grabbing NIGHTSCOUT enries/sgv.json for date range: {0} to {1}'.format(start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d")))
@@ -161,50 +152,50 @@ def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory):
#TODO: Add ability to use API secret for Nightscout.
res = requests.get(url)
with open(os.path.join(directory, 'autotune', 'ns-entries.{date}.json'.format(date=date.strftime("%Y-%m-%d"))), 'w') as f:
- f.write(res.text.encode('utf-8'))
+ f.write(six.ensure_str(res.text, 'utf-8'))
def run_autotune(start_date, end_date, number_of_runs, directory):
date_list = [start_date + datetime.timedelta(days=x) for x in range(0, (end_date - start_date).days)]
autotune_directory = os.path.join(directory, 'autotune')
+ FNULL = open(os.devnull, 'w')
for run_number in range(1, number_of_runs + 1):
for date in date_list:
# cp profile.json profile.$run_number.$i.json
shutil.copy(os.path.join(autotune_directory, 'profile.json'),
os.path.join(autotune_directory, 'profile.{run_number}.{date}.json'
.format(run_number=run_number, date=date.strftime("%Y-%m-%d"))))
-
- # Autotune Prep (required args, ), output prepped glucose
+
+ # Autotune Prep (required args, ), output prepped glucose
# data or below
# oref0-autotune-prep ns-treatments.json profile.json ns-entries.$DATE.json > autotune.$RUN_NUMBER.$DATE.json
ns_treatments = os.path.join(autotune_directory, 'ns-treatments.json')
profile = os.path.join(autotune_directory, 'profile.json')
+ pump_profile = os.path.join(autotune_directory, "pumpprofile.json")
ns_entries = os.path.join(autotune_directory, 'ns-entries.{date}.json'.format(date=date.strftime("%Y-%m-%d")))
- autotune_prep = 'oref0-autotune-prep {ns_treatments} {profile} {ns_entries}'.format(ns_treatments=ns_treatments, profile=profile, ns_entries=ns_entries)
-
- # autotune.$RUN_NUMBER.$DATE.json
+
+ # autotune.$RUN_NUMBER.$DATE.json
autotune_run_filename = os.path.join(autotune_directory, 'autotune.{run_number}.{date}.json'
.format(run_number=run_number, date=date.strftime("%Y-%m-%d")))
- with open(autotune_run_filename, "w+") as output:
- logging.info('Running {script}'.format(script=autotune_prep))
- call(autotune_prep, stdout=output, shell=True)
- logging.info('Writing output to {filename}'.format(filename=autotune_run_filename))
-
- # Autotune (required args, ),
+ autotune_prep = 'oref0-autotune-prep {ns_treatments} {profile} {ns_entries} {pump_profile} --output-file {autotune_run_filename}'.format(ns_treatments=ns_treatments, profile=profile, ns_entries=ns_entries, pump_profile=pump_profile, autotune_run_filename=autotune_run_filename)
+ logging.info('Running {script}'.format(script=autotune_prep))
+ call(autotune_prep, stdout=FNULL, shell=True)
+ logging.info('Writing output to {filename}'.format(filename=autotune_run_filename))
+
+ # Autotune (required args, ),
# output autotuned profile or what will be used as in the next iteration
# oref0-autotune-core autotune.$RUN_NUMBER.$DATE.json profile.json profile.pump.json > newprofile.$RUN_NUMBER.$DATE.json
-
+
# oref0-autotune-core autotune.$run_number.$i.json profile.json profile.pump.json > newprofile.$RUN_NUMBER.$DATE.json
profile_pump = os.path.join(autotune_directory, 'profile.pump.json')
- autotune_core = 'oref0-autotune-core {autotune_run} {profile} {profile_pump}'.format(profile=profile, profile_pump = profile_pump, autotune_run=autotune_run_filename)
-
+
# newprofile.$RUN_NUMBER.$DATE.json
newprofile_run_filename = os.path.join(autotune_directory, 'newprofile.{run_number}.{date}.json'
.format(run_number=run_number, date=date.strftime("%Y-%m-%d")))
- with open(newprofile_run_filename, "w+") as output:
- logging.info('Running {script}'.format(script=autotune_core))
- call(autotune_core, stdout=output, shell=True)
- logging.info('Writing output to {filename}'.format(filename=autotune_run_filename))
-
+ autotune_core = 'oref0-autotune-core {autotune_run} {profile} {profile_pump} --output-file {newprofile_run_filename}'.format(profile=profile, profile_pump = profile_pump, autotune_run=autotune_run_filename, newprofile_run_filename=newprofile_run_filename)
+ logging.info('Running {script}'.format(script=autotune_core))
+ call(autotune_core, stdout=FNULL, shell=True)
+ logging.info('Writing output to {filename}'.format(filename=newprofile_run_filename))
+
# Copy tuned profile produced by autotune to profile.json for use with next day of data
# cp newprofile.$RUN_NUMBER.$DATE.json profile.json
shutil.copy(os.path.join(autotune_directory, 'newprofile.{run_number}.{date}.json'.format(run_number=run_number, date=date.strftime("%Y-%m-%d"))),
@@ -218,13 +209,13 @@ def create_summary_report_and_display_results(output_directory):
print()
print("Autotune pump profile recommendations:")
print("---------------------------------------------------------")
-
+
report_file = os.path.join(output_directory, 'autotune', 'autotune_recommendations.log')
autotune_recommends_report = 'oref0-autotune-recommends-report {0}'.format(output_directory)
-
+
call(autotune_recommends_report, shell=True)
print("Recommendations Log File: {0}".format(report_file))
-
+
# Go ahead and echo autotune_recommendations.log to the terminal, minus blank lines
# cat $report_file | egrep -v "\| *\| *$"
call(['cat {0} | egrep -v "\| *\| *$"'.format(report_file)], shell=True)
@@ -234,20 +225,20 @@ def create_summary_report_and_display_results(output_directory):
logging.basicConfig(level=logging.DEBUG)
# Supress non-essential logs (below WARNING) from requests module.
logging.getLogger("requests").setLevel(logging.WARNING)
-
+
args = get_input_arguments()
- assign_args_to_variables(args)
-
+ directory, nightscout_host, start_date, end_date, number_of_runs, export_excel, recommends_report = assign_args_to_variables(args)
+
# TODO: Convert Nightscout profile to OpenAPS profile format.
- #get_nightscout_profile(NIGHTSCOUT_HOST)
-
- get_openaps_profile(DIR)
- get_nightscout_carb_and_insulin_treatments(NIGHTSCOUT_HOST, START_DATE, END_DATE, DIR)
- get_nightscout_bg_entries(NIGHTSCOUT_HOST, START_DATE, END_DATE, DIR)
- run_autotune(START_DATE, END_DATE, NUMBER_OF_RUNS, DIR)
-
- if EXPORT_EXCEL:
- export_to_excel(DIR, EXPORT_EXCEL)
-
- if RECOMMENDS_REPORT:
- create_summary_report_and_display_results(DIR)
+ #get_nightscout_profile(NIGHTSCOUT_HOST, DIR)
+
+ get_openaps_profile(directory)
+ get_nightscout_carb_and_insulin_treatments(nightscout_host, start_date, end_date, directory)
+ get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory)
+ run_autotune(start_date, end_date, number_of_runs, directory)
+
+ if export_excel:
+ export_to_excel(directory, export_excel)
+
+ if recommends_report:
+ create_summary_report_and_display_results(directory)
diff --git a/bin/oref0-autotune.sh b/bin/oref0-autotune.sh
index ac3c53a36..393a87303 100755
--- a/bin/oref0-autotune.sh
+++ b/bin/oref0-autotune.sh
@@ -248,7 +248,7 @@ do
cp profile.pump.json profile.json
exit
else
- die "Could not run oref0-autotune-core autotune.$i.json profile.json profile.pump.json"
+ die "Could not run oref0-autotune-core autotune.$i.json profile.json profile.pump.json. Make sure Nightscout contains BG values for the selected date range; Autotune(Web) does not work without BG values. See the documentation for how to check: http://nightscout.github.io/nightscout/reports/#day-to-day"
fi
else
# Copy tuned profile produced by autotune to profile.json for use with next day of data
diff --git a/bin/oref0-bash-common-functions.sh b/bin/oref0-bash-common-functions.sh
index b68ae2b4f..9a8d5b3e7 100755
--- a/bin/oref0-bash-common-functions.sh
+++ b/bin/oref0-bash-common-functions.sh
@@ -8,6 +8,45 @@ self=$(basename $0)
PREFERENCES_FILE="preferences.json"
+function run_remote_command () {
+ set -o pipefail
+ out_file=$( mktemp /tmp/shared_node.XXXXXXXXXXXX)
+ #echo $out_file
+ echo -n $1 |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node > $out_file || return 1
+ #cat $out_file
+ jq -j .err $out_file >&2
+ jq -j .stdout $out_file
+ return_val=$( jq -r .return_val $out_file)
+ rm $out_file
+ return $(( return_val ))
+}
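+# The shared node answers each request with one JSON object, {"err":"...","stdout":"...","return_val":N}
+# (the same shape as the ping/pong reply checked below). Rough usage sketch, with a hypothetical command string:
+#   out=$(run_remote_command 'some_command arg1 arg2') || echo "shared node call failed" >&2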
+
+function start_share_node_if_needed() {
+ # First check if node is alive
+ output="$(echo ping |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node)"
+ echo $output
+ if [ "$output" = '{"err":"","stdout":"pong","return_val":0}' ]; then
+ echo shared node is alive
+ return 0
+ fi
+ echo 'killing node so it will restart later'
+ node_pid="$(ps -ef | grep node | grep oref0-shared-node.js | grep -v grep | awk '{print $2 }')"
+ echo $node_pid
+ kill -9 $node_pid
+ # Node should be started automatically by oref0-shared-node-loop
+ # Waiting 90 seconds for it to start
+ for i in {1..90}
+ do
+ sleep 1
+ output="$(echo ping |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node)"
+ echo $output
+ if [ "$output" = '{"err":"","stdout":"pong","return_val":0}' ]; then
+ echo shared node is alive
+ return 0
+ fi
+ done
+ die Waiting for shared node failed
+}
function overtemp {
# check for CPU temperature above 85°C
@@ -503,7 +542,7 @@ function wait_for_silence {
echo -n .
# returns true if it hears pump comms, false otherwise
if ! listen -t $waitfor's' 2>&4 ; then
- echo "No interfering pump comms detected from other rigs (this is a good thing!)"
+ echo " All clear."
echo -n "Continuing oref0-pump-loop at "; date
return 0
else
diff --git a/bin/oref0-calculate-iob.js b/bin/oref0-calculate-iob.js
index b64d2464f..b0723def9 100755
--- a/bin/oref0-calculate-iob.js
+++ b/bin/oref0-calculate-iob.js
@@ -1,5 +1,5 @@
#!/usr/bin/env node
-
+'use strict';
/*
Insulin On Board (IOB) calculations.
@@ -19,13 +19,16 @@
*/
var generate = require('../lib/iob');
+var fs = require('fs');
function usage ( ) {
console.log('usage: ', process.argv.slice(0, 2), ' [autosens.json] [pumphistory-24h-zoned.json]');
}
-if (!module.parent) {
- var argv = require('yargs')
+
+
+var oref0_calculate_iob = function oref0_calculate_iob(argv_params) {
+ var argv = require('yargs')(argv_params)
.usage("$0 [] []")
.strict(true)
.help('help');
@@ -46,21 +49,21 @@ if (!module.parent) {
var pumphistory_24_input = inputs[4];
var cwd = process.cwd();
- var pumphistory_data = require(cwd + '/' + pumphistory_input);
- var profile_data = require(cwd + '/' + profile_input);
- var clock_data = require(cwd + '/' + clock_input);
+ var pumphistory_data = JSON.parse(fs.readFileSync(cwd + '/' + pumphistory_input));
+ var profile_data = JSON.parse(fs.readFileSync(cwd + '/' + profile_input));
+ var clock_data = JSON.parse(fs.readFileSync(cwd + '/' + clock_input));
var autosens_data = null;
if (autosens_input) {
try {
- autosens_data = require(cwd + '/' + autosens_input);
+ autosens_data = JSON.parse(fs.readFileSync(cwd + '/' + autosens_input));
} catch (e) {}
//console.error(autosens_input, JSON.stringify(autosens_data));
}
var pumphistory_24_data = null;
if (pumphistory_24_input) {
try {
- pumphistory_24_data = require(cwd + '/' + pumphistory_24_input);
+ pumphistory_24_data = JSON.parse(fs.readFileSync(cwd + '/' + pumphistory_24_input));
} catch (e) {}
}
@@ -77,6 +80,16 @@ if (!module.parent) {
}
var iob = generate(inputs);
- console.log(JSON.stringify(iob));
+ return(JSON.stringify(iob));
+}
+
+if (!module.parent) {
+ // remove the first parameter.
+ var command = process.argv;
+ command.shift();
+ command.shift();
+ var result = oref0_calculate_iob(command)
+ console.log(result);
}
+exports = module.exports = oref0_calculate_iob
\ No newline at end of file
diff --git a/bin/oref0-cron-every-15min.sh b/bin/oref0-cron-every-15min.sh
index ea79a3405..b2b5a4a38 100755
--- a/bin/oref0-cron-every-15min.sh
+++ b/bin/oref0-cron-every-15min.sh
@@ -12,12 +12,24 @@ assert_cwd_contains_ini
# proper shutdown once the EdisonVoltage very low (< 3050mV; 2950 is dead)
if is_edison; then
- sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage | awk '{if ($1<=3050)system("sudo shutdown -h now")}' &
+ BATTERY_VOLTAGE="$(sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage)"
+ echo "Battery voltage is $BATTERY_VOLTAGE."
+ BATTERY_CUTOFF=$(get_pref_float .edison_battery_shutdown_voltage 3050)
+ if (( "$BATTERY_VOLTAGE" <= "$BATTERY_CUTOFF" )); then
+ echo "Critically low battery! Shutting down."
+ sudo shutdown -h now
+ fi
fi
# proper shutdown of pi rigs once the battery level is below 2 % (should be more than enough to shut down on a standard 18600 ~2Ah cell)
if is_pi; then
- sudo ~/src/openaps-menu/scripts/getvoltage.sh | tee ~/myopenaps/monitor/edison-battery.json | jq .battery | awk '{if ($1<2)system("sudo shutdown -h now")}' &
+ BATTERY_PERCENT="$(sudo ~/src/openaps-menu/scripts/getvoltage.sh | tee ~/myopenaps/monitor/edison-battery.json | jq .battery)"
+ BATTERY_CUTOFF=$(get_pref_float .pi_battery_shutdown_percent 2)
+ echo "Battery level is $BATTERY_PERCENT percent"
+ if (( "$BATTERY_PERCENT" < "$BATTERY_CUTOFF" )); then
+ echo "Critically low battery! Shutting down."
+ sudo shutdown -h now
+ fi
fi
# temporarily disable hotspot for 1m every 15m to allow it to try to connect via wifi again
@@ -28,3 +40,4 @@ fi
) &
oref0-version --check-for-updates > /tmp/oref0-updates.txt &
+/root/src/oref0/bin/oref0-upgrade.sh
diff --git a/bin/oref0-cron-every-minute.sh b/bin/oref0-cron-every-minute.sh
index 28f2916f6..843d5d801 100755
--- a/bin/oref0-cron-every-minute.sh
+++ b/bin/oref0-cron-every-minute.sh
@@ -112,6 +112,10 @@ if ! is_bash_process_running_named oref0-pump-loop; then
oref0-pump-loop 2>&1 | tee -a /var/log/openaps/pump-loop.log | adddate openaps.pump-loop | uncolor |tee -a /var/log/openaps/openaps-date.log &
fi
+if ! is_bash_process_running_named oref0-shared-node-loop; then
+ oref0-shared-node-loop 2>&1 | tee -a /var/log/openaps/shared-node.log | adddate openaps.shared-node | uncolor |tee -a /var/log/openaps/openaps-date.log &
+fi
+
if [[ ! -z "$BT_PEB" ]]; then
if ! is_process_running_named "peb-urchin-status $BT_PEB"; then
peb-urchin-status $BT_PEB 2>&1 | tee -a /var/log/openaps/urchin-loop.log | adddate openaps.urchin-loop | uncolor |tee -a /var/log/openaps/openaps-date.log &
@@ -124,9 +128,9 @@ if [[ ! -z "$BT_PEB" || ! -z "$BT_MAC" ]]; then
fi
fi
-if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then
- oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log &
-fi
+#if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then
+ #oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log &
+#fi
# if disk has less than 10MB free, delete something and logrotate
cd /var/log/openaps/ && df . | awk '($4 < 10000) {print $4}' | while read line; do
@@ -138,6 +142,7 @@ done | while read file; do
# attempt a logrotate
logrotate /etc/logrotate.conf -f
done
+start_share_node_if_needed
# check if 5 minutes have passed, and if yes, turn of the screen to save power
ttyport="$(get_pref_string .ttyport)"
diff --git a/bin/oref0-detect-sensitivity.js b/bin/oref0-detect-sensitivity.js
index f2a171ce4..d2ddfe710 100755
--- a/bin/oref0-detect-sensitivity.js
+++ b/bin/oref0-detect-sensitivity.js
@@ -14,7 +14,7 @@
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
-var detect = require('../lib/determine-basal/autosens');
+var detectSensitivity = require('../lib/determine-basal/autosens');
if (!module.parent) {
var argv = require('yargs')
@@ -112,14 +112,14 @@ if (!module.parent) {
};
console.error("Calculating sensitivity using 8h of non-exluded data");
detection_inputs.deviations = 96;
- detect(detection_inputs);
- var ratio8h = ratio;
- var newisf8h = newisf;
+ var result = detectSensitivity(detection_inputs);
+ var ratio8h = result.ratio;
+ var newisf8h = result.newisf;
console.error("Calculating sensitivity using all non-exluded data (up to 24h)");
detection_inputs.deviations = 288;
- detect(detection_inputs);
- var ratio24h = ratio;
- var newisf24h = newisf;
+ result = detectSensitivity(detection_inputs);
+ var ratio24h = result.ratio;
+ var newisf24h = result.newisf;
if ( ratio8h < ratio24h ) {
console.error("Using 8h autosens ratio of",ratio8h,"(ISF",newisf8h+")");
} else {
diff --git a/bin/oref0-get-ns-entries.js b/bin/oref0-get-ns-entries.js
index 9bacfefe2..4932aac31 100755
--- a/bin/oref0-get-ns-entries.js
+++ b/bin/oref0-get-ns-entries.js
@@ -1,4 +1,5 @@
#!/usr/bin/env node
+'use strict';
/*
oref0 Nightscout treatment fetch tool
@@ -25,13 +26,16 @@ var request = require('request');
var _ = require('lodash');
var fs = require('fs');
var network = require('network');
+var shared_node = require('./oref0-shared-node-utils');
+var console_error = shared_node.console_error;
+var console_log = shared_node.console_log;
+var initFinalResults = shared_node.initFinalResults;
-var safe_errors = ['ECONNREFUSED', 'ESOCKETTIMEDOUT', 'ETIMEDOUT'];
-var log_errors = true;
+var oref0_get_ns_engtires = function oref0_get_ns_engtires(argv_params, print_callback, final_result) {
+ var safe_errors = ['ECONNREFUSED', 'ESOCKETTIMEDOUT', 'ETIMEDOUT'];
+ var log_errors = true;
-if (!module.parent) {
-
- var argv = require('yargs')
+ var argv = require('yargs')(argv_params)
.usage("$0 ns-glucose.json NSURL API-SECRET ")
.strict(true)
.help('help');
@@ -45,11 +49,10 @@ if (!module.parent) {
if ([null, '--help', '-h', 'help'].indexOf(glucose_input) > 0) {
usage();
- process.exit(0);
+ process.exit(0); //???????
}
var nsurl = params._.slice(1, 2).pop();
- if (nsurl && nsurl.charAt(nsurl.length - 1) == "/") nsurl = nsurl.substr(0, nsurl.length - 1); // remove trailing slash if it exists
var apisecret = params._.slice(2, 3).pop();
var hours = Number(params._.slice(3, 4).pop());
@@ -63,6 +66,8 @@ if (!module.parent) {
usage();
process.exit(1);
}
+ // remove trailing slash if it exists
+ if (nsurl && nsurl.charAt(nsurl.length - 1) == "/") nsurl = nsurl.substr(0, nsurl.length - 1);
if (apisecret != null && !apisecret.startsWith("token=") && apisecret.length != 40) {
var shasum = crypto.createHash('sha1');
@@ -87,21 +92,21 @@ if (!module.parent) {
, headers: headers
};
- if (log_errors) console.error('Connected to ' + ip +', testing for xDrip API availability');
+ if (log_errors) console_error(final_result, 'Connecting to ' + ip +', testing for xDrip API availability');
request(options, function(error, res, data) {
var failed = false;
if (res && res.statusCode == 403) {
- console.error("Load from xDrip failed: API_SECRET didn't match");
+ console_error(final_result, "Load from xDrip failed: API_SECRET didn't match");
failed = true;
}
if (error) {
if (safe_errors.includes(error.code)) {
- if (log_errors) console.error('Load from local xDrip timed out, likely not connected to xDrip hotspot');
+ if (log_errors) console_error(final_result, 'Load from local xDrip timed out, likely not connected to xDrip hotspot');
log_errors = false;
} else {
- if (log_errors) console.error("Load from xDrip failed", error);
+ if (log_errors) console_error(final_result, "Load from xDrip failed", error);
log_errors = false;
failed = true;
}
@@ -110,12 +115,18 @@ if (!module.parent) {
}
if (!failed && data) {
- console.error("CGM results loaded from xDrip");
+ console_error(final_result, "CGM results loaded from xDrip");
processAndOutput(data);
return true;
}
- if (failed && callback) callback();
+ if (failed && callback) {
+ // printing will happen in the callback
+ callback();
+ } else {
+ print_callback(final_result);
+ }
+
});
return false;
@@ -130,7 +141,7 @@ if (!module.parent) {
fs.readFile(outputPath, 'utf8', function(err, fileContent) {
if (err) {
- console.error(err);
+ console_error(final_result, err);
} else {
try {
glucosedata = JSON.parse(fileContent);
@@ -146,10 +157,11 @@ if (!module.parent) {
glucosedata = null;
}
} catch (e) {
- console.error(e);
+ console_error(final_result, e);
}
}
loadFromNightscoutWithDate(lastDate, glucosedata);
+ // callback will happen in loadFromNightscoutWithDate
});
}
@@ -168,30 +180,33 @@ if (!module.parent) {
headers["If-Modified-Since"] = lastDate.toISOString();
}
+ headers["User-Agent"] = 'openaps';
var uri = nsurl + '/api/v1/entries/sgv.json?count=' + records + tokenAuth;
var options = {
uri: uri
, json: true
, timeout: 90000
, headers: headers
+ , gzip : true
};
request(options, function(error, res, data) {
if (res && (res.statusCode == 200 || res.statusCode == 304)) {
if (data) {
- console.error("Got CGM results from Nightscout");
+ console_error(final_result, "Got CGM results from Nightscout");
processAndOutput(data);
} else {
- console.error("Got Not Changed response from Nightscout, assuming no new data is available");
+ console_error(final_result, "Got Not Changed response from Nightscout, assuming no new data is available");
// output old file
if (!_.isNil(glucosedata)) {
- console.log(JSON.stringify(glucosedata));
+ console_log(final_result, JSON.stringify(glucosedata));
}
}
} else {
- console.error("Loading CGM data from Nightscout failed", error);
+ console_error(final_result, "Loading CGM data from Nightscout failed", error);
}
+ print_callback(final_result);
});
}
@@ -202,11 +217,28 @@ if (!module.parent) {
sgvrecord.glucose = sgvrecord.sgv;
});
- console.log(JSON.stringify(glucosedata));
+ console_log(final_result, JSON.stringify(glucosedata));
}
network.get_gateway_ip(function(err, ip) {
loadFromxDrip(nsCallback, ip);
});
+}
+function print_callback(final_result) {
+ console.log(final_result.stdout);
+ console.error(final_result.err);
}
+
+
+if (!module.parent) {
+ var final_result = initFinalResults();
+
+ // remove the first parameter.
+ var command = process.argv;
+ command.shift();
+ command.shift();
+ var result = oref0_get_ns_engtires(command, print_callback, final_result)
+}
+
+exports = module.exports = oref0_get_ns_engtires
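+// Flow note: oref0_get_ns_engtires() finishes asynchronously (request callbacks), so output is accumulated
+// in final_result via console_log/console_error and only flushed by print_callback once the xDrip or Nightscout
+// request path completes; when run standalone, print_callback above writes final_result.stdout and final_result.err.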
diff --git a/bin/oref0-get-profile.js b/bin/oref0-get-profile.js
index 96a247fda..0f08ff1f9 100755
--- a/bin/oref0-get-profile.js
+++ b/bin/oref0-get-profile.js
@@ -1,4 +1,5 @@
#!/usr/bin/env node
+'use strict';
/*
Get Basal Information
@@ -16,15 +17,21 @@
*/
+var fs = require('fs');
var generate = require('../lib/profile/');
+var shared_node_utils = require('./oref0-shared-node-utils');
+var console_error = shared_node_utils.console_error;
+var console_log = shared_node_utils.console_log;
+var process_exit = shared_node_utils.process_exit;
+var initFinalResults = shared_node_utils.initFinalResults;
-function exportDefaults () {
- var defaults = generate.displayedDefaults();
- console.log(JSON.stringify(defaults, null, '\t'));
+function exportDefaults (final_result) {
+ var defaults = generate.displayedDefaults(final_result);
+ console_log(final_result, JSON.stringify(defaults, null, '\t'));
}
-function updatePreferences (prefs) {
- var defaults = generate.displayedDefaults();
+function updatePreferences (final_result, prefs) {
+ var defaults = generate.displayedDefaults(final_result);
// check for any displayedDefaults missing from current prefs and add from defaults
@@ -34,12 +41,11 @@ function updatePreferences (prefs) {
}
}
- console.log(JSON.stringify(prefs, null, '\t'));
+ console_log(final_result, JSON.stringify(prefs, null, '\t'));
}
-if (!module.parent) {
-
- var argv = require('yargs')
+var oref0_get_profile = function oref0_get_profile(final_result, argv_params) {
+ var argv = require('yargs')(argv_params)
.usage("$0 [] [] [] [--model ] [--autotune ] [--exportDefaults] [--updatePreferences ]")
.option('model', {
alias: 'm',
@@ -71,22 +77,23 @@ if (!module.parent) {
if (!params.exportDefaults && !params.updatePreferences) {
if (params._.length < 4 || params._.length > 7) {
argv.showHelp();
- process.exit(1);
+ process_exit(final_result, 1);
+ return;
}
}
var pumpsettings_input = params._[0];
if (params.exportDefaults) {
- exportDefaults();
- process.exit(0);
+ exportDefaults(final_result);
+ return;
}
if (params.updatePreferences) {
var preferences = {};
var cwd = process.cwd()
- preferences = require(cwd + '/' + params.updatePreferences);
- updatePreferences(preferences);
- process.exit(0);
+ preferences = JSON.parse(fs.readFileSync(cwd + '/' + params.updatePreferences));
+ updatePreferences(final_result, preferences);
+ return;
}
var bgtargets_input = params._[1]
@@ -99,8 +106,8 @@ if (!module.parent) {
var autotune_input = params.autotune;
cwd = process.cwd()
- var pumpsettings_data = require(cwd + '/' + pumpsettings_input);
- var bgtargets_data = require(cwd + '/' + bgtargets_input);
+ var pumpsettings_data = JSON.parse(fs.readFileSync(cwd + '/' + pumpsettings_input));
+ var bgtargets_data = JSON.parse(fs.readFileSync(cwd + '/' + bgtargets_input));
if (bgtargets_data.units !== 'mg/dL') {
if (bgtargets_data.units === 'mmol/L') {
for (var i = 0, len = bgtargets_data.targets.length; i < len; i++) {
@@ -109,13 +116,14 @@ if (!module.parent) {
}
bgtargets_data.units = 'mg/dL';
} else {
- console.log('BG Target data is expected to be expressed in mg/dL or mmol/L.'
+ console_log(final_result, 'BG Target data is expected to be expressed in mg/dL or mmol/L.'
, 'Found', bgtargets_data.units, 'in', bgtargets_input, '.');
- process.exit(2);
+ process_exit(final_result, 2);
+ return;
}
}
- var isf_data = require(cwd + '/' + isf_input);
+ var isf_data = JSON.parse(fs.readFileSync(cwd + '/' + isf_input));
if (isf_data.units !== 'mg/dL') {
if (isf_data.units === 'mmol/L') {
for (i = 0, len = isf_data.sensitivities.length; i < len; i++) {
@@ -123,18 +131,18 @@ if (!module.parent) {
}
isf_data.units = 'mg/dL';
} else {
- console.log('ISF is expected to be expressed in mg/dL or mmol/L.'
+ console_log(final_result, 'ISF is expected to be expressed in mg/dL or mmol/L.'
, 'Found', isf_data.units, 'in', isf_input, '.');
- process.exit(2);
+ process_exit(final_result, 2);
+ return;
}
}
- var basalprofile_data = require(cwd + '/' + basalprofile_input);
+ var basalprofile_data = JSON.parse(fs.readFileSync(cwd + '/' + basalprofile_input));
preferences = {};
if (typeof preferences_input !== 'undefined') {
- preferences = require(cwd + '/' + preferences_input);
+ preferences = JSON.parse(fs.readFileSync(cwd + '/' + preferences_input));
}
- var fs = require('fs');
var model_data = { }
if (params.model) {
@@ -143,9 +151,10 @@ if (!module.parent) {
model_data = model_string.replace(/"/gi, '');
} catch (e) {
var msg = { error: e, msg: "Could not parse model_data", file: model_input};
- console.error(msg.msg);
- console.log(JSON.stringify(msg));
- process.exit(1);
+ console_error(final_result, msg.msg);
+ console_log(final_result, JSON.stringify(msg));
+ process_exit(final_result, 1);
+ return;
}
}
var autotune_data = { }
@@ -155,7 +164,7 @@ if (!module.parent) {
} catch (e) {
msg = { error: e, msg: "Could not parse autotune_data", file: autotune_input};
- console.error(msg.msg);
+ console_error(final_result, msg.msg);
// Continue and output a non-autotuned profile if we don't have autotune_data
//console.log(JSON.stringify(msg));
//process.exit(1);
@@ -170,9 +179,10 @@ if (!module.parent) {
} catch (e) {
msg = { error: e, msg: "Could not parse carbratio_data. Feature Meal Assist enabled but cannot find required carb_ratios.", file: carbratio_input };
- console.error(msg.msg);
- console.log(JSON.stringify(msg));
- process.exit(1);
+ console_error(final_result, msg.msg);
+ console_log(final_result, JSON.stringify(msg));
+ process_exit(final_result, 1);
+ return;
}
var errors = [ ];
@@ -186,10 +196,12 @@ if (!module.parent) {
if (errors.length) {
errors.forEach(function (msg) {
- console.error(msg.msg);
+ console_error(final_result, msg.msg);
});
- console.log(JSON.stringify(errors));
- process.exit(1);
+ console_log(final_result, JSON.stringify(errors));
+ process_exit(final_result, 1);
+
+ return;
}
}
var temptargets_data = { };
@@ -197,7 +209,7 @@ if (!module.parent) {
try {
temptargets_data = JSON.parse(fs.readFileSync(temptargets_input, 'utf8'));
} catch (e) {
- console.error("Could not parse temptargets_data.");
+ console_error(final_result, "Could not parse temptargets_data.");
}
}
@@ -229,8 +241,25 @@ if (!module.parent) {
if (autotune_data.isfProfile) { inputs.isf = autotune_data.isfProfile; }
if (autotune_data.carb_ratio) { inputs.carbratio.schedule[0].ratio = autotune_data.carb_ratio; }
}
- var profile = generate(inputs);
+ var profile = generate(final_result, inputs);
+
+ console_log(final_result, JSON.stringify(profile));
+
+}
- console.log(JSON.stringify(profile));
+if (!module.parent) {
+ var final_result = initFinalResults();
+ // remove the first two parameters (node binary and script path).
+ var command = process.argv;
+ command.shift();
+ command.shift();
+ oref0_get_profile(final_result, command)
+ console.log(final_result.stdout);
+ if(final_result.err.length > 0) {
+ console.error(final_result.err);
+ }
+ process.exit(final_result.return_val);
}
+
+exports = module.exports = oref0_get_profile;
diff --git a/bin/oref0-meal.js b/bin/oref0-meal.js
index 572a18baa..50ad3d1d0 100755
--- a/bin/oref0-meal.js
+++ b/bin/oref0-meal.js
@@ -1,4 +1,5 @@
#!/usr/bin/env node
+'use strict';
/*
oref0 meal data tool
@@ -20,9 +21,14 @@
*/
var generate = require('../lib/meal');
-
-if (!module.parent) {
- var argv = require('yargs')
+var shared_node_utils = require('./oref0-shared-node-utils');
+var console_error = shared_node_utils.console_error;
+var console_log = shared_node_utils.console_log;
+var process_exit = shared_node_utils.process_exit;
+var initFinalResults = shared_node_utils.initFinalResults;
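+// These helpers accumulate output and an exit status in final_result instead of
+// printing/exiting directly, so the same code can run standalone or inside oref0-shared-node.js.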
+
+var oref0_meal = function oref0_meal(final_result, argv_params) {
+ var argv = require('yargs')(argv_params)
.usage('$0 <pumphistory.json> <profile.json> <clock.json> <glucose.json> <basalprofile.json> [<carbhistory.json>]')
// error and show help if some other args given
.strict(true)
@@ -40,8 +46,9 @@ if (!module.parent) {
if (inputs.length < 5 || inputs.length > 6) {
argv.showHelp();
- console.log('{ "carbs": 0, "reason": "Insufficient arguments" }');
- process.exit(1);
+ console_log(final_result, '{ "carbs": 0, "reason": "Insufficient arguments" }');
+ process_exit(final_result, 1);
+ return;
}
var fs = require('fs');
@@ -53,41 +60,41 @@ if (!module.parent) {
try {
pumphistory_data = JSON.parse(fs.readFileSync(pumphistory_input, 'utf8'));
} catch (e) {
- console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse pumphistory data" }');
- return console.error("Could not parse pumphistory data: ", e);
+ console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse pumphistory data" }'); //??
+ return console_error(final_result, "Could not parse pumphistory data: ", e);
}
try {
profile_data = JSON.parse(fs.readFileSync(profile_input, 'utf8'));
} catch (e) {
- console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse profile data" }');
- return console.error("Could not parse profile data: ", e);
+ console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse profile data" }');
+ return console_error(final_result, "Could not parse profile data: ", e);
}
try {
clock_data = JSON.parse(fs.readFileSync(clock_input, 'utf8'));
} catch (e) {
- console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse clock data" }');
- return console.error("Could not parse clock data: ", e);
+ console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse clock data" }');
+ return console_error(final_result, "Could not parse clock data: ", e);
}
try {
basalprofile_data = JSON.parse(fs.readFileSync(basalprofile_input, 'utf8'));
} catch (e) {
- console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse basalprofile data" }');
- return console.error("Could not parse basalprofile data: ", e);
+ console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse basalprofile data" }');
+ return console_error(final_result, "Could not parse basalprofile data: ", e);
}
// disallow impossibly low carbRatios due to bad decoding
if ( typeof(profile_data.carb_ratio) === 'undefined' || profile_data.carb_ratio < 3 ) {
- console.log('{ "carbs": 0, "mealCOB": 0, "reason": "carb_ratio ' + profile_data.carb_ratio + ' out of bounds" }');
- return console.error("Error: carb_ratio " + profile_data.carb_ratio + " out of bounds");
+ console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "carb_ratio ' + profile_data.carb_ratio + ' out of bounds" }');
+ return console_error(final_result, "Error: carb_ratio " + profile_data.carb_ratio + " out of bounds");
}
try {
var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8'));
} catch (e) {
- console.error("Warning: could not parse "+glucose_input);
+ console_error(final_result, "Warning: could not parse "+glucose_input);
}
var carb_data = { };
@@ -95,19 +102,19 @@ if (!module.parent) {
try {
carb_data = JSON.parse(fs.readFileSync(carb_input, 'utf8'));
} catch (e) {
- console.error("Warning: could not parse "+carb_input);
+ console_error(final_result, "Warning: could not parse "+carb_input);
}
}
if (typeof basalprofile_data[0] === 'undefined') {
- return console.error("Error: bad basalprofile_data:" + basalprofile_data);
+ return console_error(final_result, "Error: bad basalprofile_data:" + basalprofile_data);
}
if (typeof basalprofile_data[0].glucose !== 'undefined') {
- console.error("Warning: Argument order has changed: please update your oref0-meal device and meal.json report to place carbhistory.json after basalprofile.json");
- var temp = carb_data;
- carb_data = glucose_data;
- glucose_data = basalprofile_data;
- basalprofile_data = temp;
+ console_error(final_result, "Warning: Argument order has changed: please update your oref0-meal device and meal.json report to place carbhistory.json after basalprofile.json");
+ var temp = carb_data;
+ carb_data = glucose_data;
+ glucose_data = basalprofile_data;
+ basalprofile_data = temp;
}
inputs = {
@@ -122,11 +129,26 @@ if (!module.parent) {
var recentCarbs = generate(inputs);
if (glucose_data.length < 36) {
- console.error("Not enough glucose data to calculate carb absorption; found:", glucose_data.length);
+ console_error(final_result, "Not enough glucose data to calculate carb absorption; found:", glucose_data.length);
recentCarbs.mealCOB = 0;
recentCarbs.reason = "not enough glucose data to calculate carb absorption";
}
- console.log(JSON.stringify(recentCarbs));
+ console_log(final_result, recentCarbs);
+}
+
+if (!module.parent) {
+ var final_result = initFinalResults();
+ // remove the first two parameters (node binary and script path).
+ var command = process.argv;
+ command.shift();
+ command.shift();
+ oref0_meal(final_result, command);
+ console.log(final_result.stdout);
+ if(final_result.err.length > 0) {
+ console.error(final_result.err);
+ }
+ process.exit(final_result.return_val);
}
+exports = module.exports = oref0_meal
\ No newline at end of file
diff --git a/bin/oref0-normalize-temps.js b/bin/oref0-normalize-temps.js
index 3320713b3..2acdb6f70 100755
--- a/bin/oref0-normalize-temps.js
+++ b/bin/oref0-normalize-temps.js
@@ -1,4 +1,5 @@
#!/usr/bin/env node
+'use strict';
/*
Released under MIT license. See the accompanying LICENSE.txt file for
@@ -17,9 +18,12 @@
var find_insulin = require('../lib/temps');
var find_bolus = require('../lib/bolus');
var describe_pump = require('../lib/pump');
+var fs = require('fs');
-if (!module.parent) {
- var argv = require('yargs')
+
+
+var oref0_normalize_temps = function oref0_normalize_temps(argv_params) {
+ var argv = require('yargs')(argv_params)
.usage('$0 <pumphistory.json>')
.demand(1)
// error and show help if some other args given
@@ -31,13 +35,12 @@ if (!module.parent) {
if (params._.length > 1) {
argv.showHelp();
- console.error('Too many arguments');
- process.exit(1);
+ return console.error('Too many arguments');
}
var cwd = process.cwd()
try {
- var all_data = require(cwd + '/' + iob_input);
+ var all_data = JSON.parse(fs.readFileSync(cwd + '/' + iob_input));
} catch (e) {
return console.error("Could not parse pumphistory: ", e);
}
@@ -50,6 +53,18 @@ if (!module.parent) {
// treatments.sort(function (a, b) { return a.date > b.date });
- console.log(JSON.stringify(treatments));
+ return JSON.stringify(treatments);
+}
+
+if (!module.parent) {
+ // remove the first two parameters (node binary and script path).
+ var command = process.argv;
+ command.shift();
+ command.shift();
+ var result = oref0_normalize_temps(command)
+ if(result !== undefined) {
+ console.log(result);
+ }
}
+exports = module.exports = oref0_normalize_temps
diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh
index ce10823a1..34e929e92 100755
--- a/bin/oref0-ns-loop.sh
+++ b/bin/oref0-ns-loop.sh
@@ -25,7 +25,7 @@ main() {
fi
fi
- pushover_snooze
+ #pushover_snooze
ns_temptargets || die "ns_temptargets failed"
ns_meal_carbs || echo "ns_meal_carbs failed"
battery_status
@@ -41,7 +41,13 @@ EOT
function pushover_snooze {
URL=$NIGHTSCOUT_HOST/api/v1/devicestatus.json?count=100
- if snooze=$(curl -s $URL | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes); then
+ if [[ "${API_SECRET}" =~ "token=" ]]; then
+ URL="${URL}&${API_SECRET}"
+ else
+ CURL_AUTH='-H api-secret:'${API_SECRET}
+ fi
+
+ if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes); then
#echo $snooze
#echo date -Is -d @$snooze; echo
touch -d $(date -Is -d @$snooze) monitor/pushover-sent
@@ -56,18 +62,18 @@ function get_ns_bg {
|| ! jq . cgm/ns-glucose-24h.json | grep -c glucose | jq -e '. > 36' >/dev/null; then
#nightscout ns $NIGHTSCOUT_HOST $API_SECRET oref0_glucose_since -24hours > cgm/ns-glucose-24h.json
cp cgm/ns-glucose-24h.json cgm/ns-glucose-24h-temp.json
- oref0-get-ns-entries cgm/ns-glucose-24h-temp.json $NIGHTSCOUT_HOST $API_SECRET 24 2>&1 >cgm/ns-glucose-24h.json
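+ # run_remote_command hands the quoted command to the long-running shared node (oref0-shared-node.js) instead of starting a new node process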
+ run_remote_command "oref0-get-ns-entries cgm/ns-glucose-24h-temp.json $NIGHTSCOUT_HOST $API_SECRET 24" 2>&1 >cgm/ns-glucose-24h.json
fi
#nightscout ns $NIGHTSCOUT_HOST $API_SECRET oref0_glucose_since -1hour > cgm/ns-glucose-1h.json
cp cgm/ns-glucose-1h.json cgm/ns-glucose-1h-temp.json
- oref0-get-ns-entries cgm/ns-glucose-1h-temp.json $NIGHTSCOUT_HOST $API_SECRET 1 2>&1 >cgm/ns-glucose-1h.json
+ run_remote_command "oref0-get-ns-entries cgm/ns-glucose-1h-temp.json $NIGHTSCOUT_HOST $API_SECRET 1" 2>&1 >cgm/ns-glucose-1h.json
jq -s '.[0] + .[1]|unique|sort_by(.date)|reverse' cgm/ns-glucose-24h.json cgm/ns-glucose-1h.json > cgm/ns-glucose.json
glucose_fresh # update timestamp on cgm/ns-glucose.json
# if ns-glucose.json data is <10m old, no more than 5m in the future, and valid (>38),
# copy cgm/ns-glucose.json over to cgm/glucose.json if it's newer
valid_glucose=$(find_valid_ns_glucose)
- if echo $valid_glucose | grep -q glucose; then
+ if echo $valid_glucose | grep glucose >/dev/null; then
echo Found recent valid BG:
echo $valid_glucose | colorize_json '.[0] | { glucose: .glucose, dateString: .dateString }'
cp -pu cgm/ns-glucose.json cgm/glucose.json
@@ -92,14 +98,13 @@ function glucose_fresh {
}
function find_valid_ns_glucose {
- # TODO: use jq for this if possible
- cat cgm/ns-glucose.json | json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38"
+ run_remote_command 'json -f cgm/ns-glucose.json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38"'
}
function ns_temptargets {
#openaps report invoke settings/temptargets.json settings/profile.json >/dev/null
nightscout ns $NIGHTSCOUT_HOST $API_SECRET temp_targets > settings/ns-temptargets.json.new
- cat settings/ns-temptargets.json.new | jq .[0].duration | egrep -q [0-9] && mv settings/ns-temptargets.json.new settings/ns-temptargets.json
+ cat settings/ns-temptargets.json.new | jq .[0].duration | egrep "[0-9]" >/dev/null && mv settings/ns-temptargets.json.new settings/ns-temptargets.json
# TODO: merge local-temptargets.json with ns-temptargets.json
#openaps report invoke settings/ns-temptargets.json settings/profile.json
echo -n "Latest NS temptargets: "
@@ -111,20 +116,33 @@ function ns_temptargets {
jq -s '.[0] + .[1]|unique|sort_by(.created_at)|reverse' settings/ns-temptargets.json settings/local-temptargets.json > settings/temptargets.json
echo -n "Temptargets merged: "
cat settings/temptargets.json | colorize_json '.[0] | { target: .targetBottom, duration: .duration, start: .created_at }'
- oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json | jq . > settings/profile.json.new || die "Couldn't refresh profile"
- if cat settings/profile.json.new | jq . | grep -q basal; then
+
+ dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-ns
+ #echo dir_name = $dir_name
+ # mkdir -p $dir_name
+ #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name
+
+ run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || die "Couldn't refresh profile"
+ if cat settings/profile.json.new | jq . | grep basal > /dev/null; then
mv settings/profile.json.new settings/profile.json
else
die "Invalid profile.json.new after refresh"
fi
}
-# openaps report invoke monitor/carbhistory.json; oref0-meal monitor/pumphistory-merged.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new; grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json; exit 0
+# openaps report invoke monitor/carbhistory.json; oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new; grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json; exit 0
function ns_meal_carbs {
#openaps report invoke monitor/carbhistory.json >/dev/null
nightscout ns $NIGHTSCOUT_HOST $API_SECRET carb_history > monitor/carbhistory.json.new
- cat monitor/carbhistory.json.new | jq .[0].carbs | egrep -q [0-9] && mv monitor/carbhistory.json.new monitor/carbhistory.json
- oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new
+ cat monitor/carbhistory.json.new | jq .[0].carbs | egrep "[0-9]" >/dev/null && mv monitor/carbhistory.json.new monitor/carbhistory.json
+
+ dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M")
+ #echo dir_name = $dir_name
+ # mkdir -p $dir_name
+ #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name
+
+
+ run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new
#grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json
check_cp_meal || return 1
echo -n "Refreshed carbhistory; COB: "
@@ -171,8 +189,9 @@ function upload {
# grep -q iob monitor/iob.json && find enact/ -mmin -5 -size +5c | grep -q suggested.json && openaps format-ns-status && grep -q iob upload/ns-status.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json upload/ns-status.json
function upload_ns_status {
+ set -o pipefail
#echo Uploading devicestatus
- grep -q iob monitor/iob.json || die "IOB not found"
+ grep iob monitor/iob.json >/dev/null || die "IOB not found"
# set the timestamp on enact/suggested.json to match the deliverAt time
touch -d $(cat enact/suggested.json | jq .deliverAt | sed 's/"//g') enact/suggested.json
if ! file_is_recent_and_min_size enact/suggested.json 10; then
@@ -180,17 +199,30 @@ function upload_ns_status {
ls -la enact/suggested.json | awk '{print $6,$7,$8}'
return 1
fi
- format_ns_status && grep -q iob upload/ns-status.json || die "Couldn't generate ns-status.json"
- ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json upload/ns-status.json | colorize_json '.[0].openaps.suggested | {BG: .bg, IOB: .IOB, rate: .rate, duration: .duration, units: .units}' || die "Couldn't upload devicestatus to NS"
+ ns_status_file_name=ns-status$(date +"%Y-%m-%d-%T").json
+ format_ns_status $ns_status_file_name && grep iob upload/$ns_status_file_name >/dev/null || die "Couldn't generate $ns_status_file_name"
+ # Delete files older than 24 hours.
+ find upload -maxdepth 1 -mmin +1440 -type f -name "ns-status*.json" -delete
+ # Upload the files one by one according to their order.
+ ls upload/ns-status*.json | while read -r file_name ; do
+ if ! grep iob $file_name >/dev/null ; then
+ #echo deleting file $file_name
+ rm $file_name
+ continue
+ fi
+ ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json $file_name | colorize_json '.[0].openaps.suggested | {BG: .bg, IOB: .IOB, rate: .rate, duration: .duration, units: .units}' || die "Couldn't upload devicestatus to NS"
+ rm $file_name
+ done
}
#ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json > upload/ns-status.json
# ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --uploader monitor/edison-battery.json > upload/ns-status.json
+# first parameter - ns_status file name
function format_ns_status {
if [ -s monitor/edison-battery.json ]; then
- ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json > upload/ns-status.json
+ run_remote_command 'ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json' > upload/$1
else
- ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json > upload/ns-status.json
+ run_remote_command 'ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json' > upload/$1
fi
}
@@ -198,7 +230,8 @@ function format_ns_status {
function upload_recent_treatments {
#echo Uploading treatments
format_latest_nightscout_treatments || die "Couldn't format latest NS treatments"
- if test $(json -f upload/latest-treatments.json -a created_at eventType | wc -l ) -gt 0; then
+
+ if test $(jq -r '.[] |.created_at + " " + .eventType' upload/latest-treatments.json | wc -l ) -gt 0; then
ns-upload $NIGHTSCOUT_HOST $API_SECRET treatments.json upload/latest-treatments.json | colorize_json || die "Couldn't upload latest treatments to NS"
else
echo "No new treatments to upload"
@@ -206,7 +239,7 @@ function upload_recent_treatments {
}
function latest_ns_treatment_time {
- nightscout latest-openaps-treatment $NIGHTSCOUT_HOST $API_SECRET | jq -r .created_at
+ date -Is -d $(nightscout latest-openaps-treatment $NIGHTSCOUT_HOST $API_SECRET | jq -r .created_at)
}
#nightscout cull-latest-openaps-treatments monitor/pumphistory-zoned.json settings/model.json $(openaps latest-ns-treatment-time) > upload/latest-treatments.json
diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh
index 8af151386..a3ec33b00 100755
--- a/bin/oref0-pump-loop.sh
+++ b/bin/oref0-pump-loop.sh
@@ -68,9 +68,17 @@ main() {
fi
fi
touch /tmp/pump_loop_completed -r /tmp/pump_loop_enacted
+ # run pushover immediately after completing loop for more timely carbsReq notifications without race conditions
+ PUSHOVER_TOKEN="$(get_pref_string .pushover_token "")"
+ PUSHOVER_USER="$(get_pref_string .pushover_user "")"
+ if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then
+ oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER # 2>&1 >> /var/log/openaps/pushover.log &
+ fi
+
# before each of these (optional) refresh checks, make sure we don't have fresh glucose data
# if we do, then skip the optional checks to finish up this loop and start the next one
if ! glucose-fresh; then
+ wait_for_silence $upto10s
if onbattery; then
refresh_profile 30
else
@@ -161,8 +169,13 @@ function fail {
refresh_after_bolus_or_enact
echo "Incomplete oref0-pump-loop (pump suspended) at $(date)"
else
- pumphistory_daily_refresh
- maybe_mmtune
+ # wait upto45s and try preflight; if successful, refresh pumphistory, else mmtune
+ wait_for_silence $upto45s
+ if try_return preflight; then
+ pumphistory_daily_refresh
+ else
+ maybe_mmtune
+ fi
echo "If pump and rig are close enough, this error usually self-resolves. Stand by for the next loop."
echo Unsuccessful oref0-pump-loop at $(date)
fi
@@ -317,7 +330,7 @@ function smb_suggest {
}
function determine_basal {
- cat monitor/meal.json
+ #cat monitor/meal.json
update_glucose_noise
@@ -480,7 +493,7 @@ function refresh_after_bolus_or_enact {
function unsuspend_if_no_temp {
# If temp basal duration is zero, unsuspend pump
- if (cat monitor/temp_basal.json | jq '. | select(.duration == 0)' | grep -q duration); then
+ if (cat monitor/temp_basal.json | jq '. | select(.duration == 0)' | grep duration > /dev/null); then
if check_pref_bool .unsuspend_if_no_temp false; then
echo Temp basal has ended: unsuspending pump
mdt resume 2>&3
@@ -597,13 +610,18 @@ function refresh_pumphistory_and_meal {
try_return invoke_pumphistory_etc || return 1
try_return invoke_reservoir_etc || return 1
echo -n "meal.json "
- if ! retry_return oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new ; then
+
+ dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M")
+ #echo dir_name = $dir_name
+ # mkdir -p $dir_name
+ #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name
+ if ! retry_return run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new ; then
echo; echo "Couldn't calculate COB"
return 1
fi
try_return check_cp_meal || return 1
echo -n "refreshed: "
- cat monitor/meal.json
+ cat monitor/meal.json | jq -cC .
}
function check_cp_meal {
@@ -624,7 +642,12 @@ function check_cp_meal {
}
function calculate_iob {
- oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; }
+ dir_name=~/test_data/oref0-calculate-iob$(date +"%Y-%m-%d-%H%M")
+ #echo dir_name = $dir_name
+ # mkdir -p $dir_name
+ #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json $dir_name
+
+ run_remote_command 'oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json' > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; }
[ -s monitor/iob.json.new ] && jq -e .[0].iob monitor/iob.json.new >&3 && cp monitor/iob.json.new monitor/iob.json || { echo; echo "Couldn't copy IOB"; fail "$@"; }
}
@@ -674,7 +697,13 @@ function get_settings {
fi
# generate settings/pumpprofile.json without autotune
- oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; }
+
+ #dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump
+ #echo dir_name = $dir_name
+ # mkdir -p $dir_name
+ #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json $dir_name
+
+ run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json' 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; }
if [ -s settings/pumpprofile.json.new ] && jq -e .current_basal settings/pumpprofile.json.new >&4; then
mv settings/pumpprofile.json.new settings/pumpprofile.json
echo -n "Pump profile refreshed; "
@@ -683,7 +712,12 @@ function get_settings {
ls -lart settings/pumpprofile.json.new
fi
# generate settings/profile.json.new with autotune
- oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json | jq . > settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; }
+ dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump-auto
+ #echo dir_name = $dir_name
+ # mkdir -p $dir_name
+ #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name
+
+ run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; }
if [ -s settings/profile.json.new ] && jq -e .current_basal settings/profile.json.new >&4; then
mv settings/profile.json.new settings/profile.json
echo -n "Settings refreshed; "
@@ -715,7 +749,7 @@ function onbattery {
function wait_for_bg {
if [ "$(get_pref_string .cgm '')" == "mdt" ]; then
echo "MDT CGM configured; not waiting"
- elif egrep -q "Warning:" enact/smb-suggested.json 2>&3; then
+ elif egrep -q "Warning:" enact/smb-suggested.json 2>&3 || egrep -q "Could not parse clock data" monitor/meal.json 2>&3; then
echo "Retrying without waiting for new BG"
elif egrep -q "Waiting [0](\.[0-9])?m ([0-6]?[0-9]s )?to microbolus again." enact/smb-suggested.json 2>&3; then
echo "Retrying microbolus without waiting for new BG"
diff --git a/bin/oref0-pushover.sh b/bin/oref0-pushover.sh
index bc13dd099..5820b39c6 100755
--- a/bin/oref0-pushover.sh
+++ b/bin/oref0-pushover.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/usr/bin/env bash
source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1)
@@ -76,21 +76,38 @@ else
PRIORITY_OPTIONS=""
fi
-date
+#date
-if file_is_recent monitor/pushover-sent $SNOOZE; then
- echo "Last pushover sent less than $SNOOZE minutes ago."
-elif ! file_is_recent "$FILE"; then
+#function pushover_snooze {
+# check Nightscout to see if another rig has already sent a carbsReq pushover recently
+ URL=$NIGHTSCOUT_HOST/api/v1/devicestatus.json?count=100
+ if [[ "${API_SECRET}" =~ "token=" ]]; then
+ URL="${URL}&${API_SECRET}"
+ else
+ CURL_AUTH='-H api-secret:'${API_SECRET}
+ fi
+
+ if snooze=$(curl --compressed -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes | grep -v null); then
+ #echo $snooze
+ #echo date -Is -d @$snooze; echo
+ touch -d $(date -Is -d @$snooze) monitor/pushover-sent
+ #ls -la monitor/pushover-sent | awk '{print $8,$9}'
+ fi
+#}
+
+if ! file_is_recent "$FILE"; then
echo "$FILE more than 5 minutes old"
exit
-elif ! cat $FILE | egrep "add'l|maxBolus"; then
- echo "No additional carbs or bolus required."
-elif [[ $ONLYFOR =~ "carb" ]] && ! cat $FILE | egrep "add'l"; then
- echo "No additional carbs required."
-elif [[ $ONLYFOR =~ "insulin" ]] && ! cat $FILE | egrep "maxBolus"; then
- echo "No additional insulin required."
+elif ! cat $FILE | egrep "add'l|maxBolus" > /dev/null; then
+ echo -n "No carbsReq. "
+elif [[ $ONLYFOR =~ "carb" ]] && ! cat $FILE | egrep "add'l" > /dev/null; then
+ echo -n "No carbsReq. "
+elif [[ $ONLYFOR =~ "insulin" ]] && ! cat $FILE | egrep "maxBolus" > /dev/null; then
+ echo -n "No additional insulin required. "
+elif file_is_recent monitor/pushover-sent $SNOOZE; then
+ echo -n "Last pushover sent less than $SNOOZE minutes ago. "
else
- curl -s -F token=$TOKEN -F user=$USER $SOUND_OPTION -F priority=$PRIORITY $PRIORITY_OPTIONS -F "message=$(jq -c "{bg, tick, carbsReq, insulinReq, reason}|del(.[] | nulls)" $FILE) - $(hostname)" https://api.pushover.net/1/messages.json && touch monitor/pushover-sent && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"carbsReq"}' | tee /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json
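+ # on success, record a carbsReq snooze in NS devicestatus so other rigs see it and skip their own pushover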
+ curl --compressed -s -F token=$TOKEN -F user=$USER $SOUND_OPTION -F priority=$PRIORITY $PRIORITY_OPTIONS -F "message=$(jq -c "{bg, tick, carbsReq, insulinReq, reason}|del(.[] | nulls)" $FILE) - $(hostname)" https://api.pushover.net/1/messages.json | jq .status| grep 1 >/dev/null && touch monitor/pushover-sent && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"carbsReq"}' > /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json >/dev/null && echo "carbsReq pushover sent."
echo
fi
@@ -106,6 +123,8 @@ source $HOME/.bash_profile
key=${MAKER_KEY:-"null"}
carbsReq=`jq .carbsReq ${FILE}`
tick=`jq .tick ${FILE}`
+tick="${tick%\"}"
+tick="${tick#\"}"
bgNow=`jq .bg ${FILE}`
delta=`echo "${tick}" | tr -d +`
delta="${delta%\"}"
@@ -119,50 +138,68 @@ pushoverGlances=$(get_prefs_json | jq -M '.pushoverGlances')
if [ "${pushoverGlances}" == "null" -o "${pushoverGlances}" == "false" ]; then
echo "pushoverGlances not enabled in preferences.json"
else
+ # if pushoverGlances is a number instead of just true, use it to set the minutes allowed between glances
+ re='^[0-9]+$'
+ if [[ ${pushoverGlances} =~ $re ]]; then
+ glanceDelay=${pushoverGlances}
+ else
+ glanceDelay=10
+ fi
GLANCES="monitor/last_glance"
GLUCOSE="monitor/glucose.json"
if [ ! -f $GLANCES ]; then
- # First time through it will get created older than 10 minutes so it'll fire
- touch $GLANCES && touch -r $GLANCES -d '-11 mins' $GLANCES
+ # First time through it will get created 1h old so it'll fire
+ touch $GLANCES && touch -r $GLANCES -d '-60 mins' $GLANCES
+ fi
+
+ if snooze=$(curl --compressed -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="glance") | select(.date>'$(date +%s -d "$glanceDelay minutes ago")')' | jq -s .[0].date | noquotes | grep -v null); then
+ #echo $snooze
+ #echo date -Is -d @$snooze; echo
+ touch -d $(date -Is -d @$snooze) $GLANCES
+ #ls -la $GLANCES | awk '{print $8,$9}'
fi
- if test `find $GLANCES -mmin +10`
+ if test `find $GLANCES -mmin +$glanceDelay` || cat $FILE | egrep "add'l" >/dev/null
then
- enactTime=$(ls -l --time-style=+"%l:%M" ${FILE} | awk '{printf ($6)}')
-
+ curTime=$(ls -l --time-style=+"%l:%M" ${FILE} | awk '{printf ($6)}')
+
lastDirection=`jq -M '.[0] .direction' $GLUCOSE`
lastDirection="${lastDirection%\"}"
lastDirection="${lastDirection#\"}"
+ rate=`jq -M '.rate' monitor/temp_basal.json`
+ duration=`jq -M '.duration' monitor/temp_basal.json`
#echo lastDirection=$lastDirection
if [ "${lastDirection}" == "SingleUp" ]; then
- direction="+"
+ direction="↑"
elif [ "${lastDirection}" == "FortyFiveUp" ]; then
- direction="++"
+ direction="↗"
elif [ "${lastDirection}" == "DoubleUp" ]; then
- direction="+++"
+ direction="↑↑"
elif [ "${lastDirection}" == "SingleDown" ]; then
- direction="-"
+ direction="↓"
elif [ "${lastDirection}" == "FortyFiveDown" ]; then
- direction="--"
+ direction="↘"
elif [ "${lastDirection}" == "DoubleDown" ]; then
- direction="---"
+ direction="↓↓"
else
- direction="" # default for NONE or Flat
+ direction="→" # default for NONE or Flat
fi
- if [ test cat $FILE | egrep "add'l" ]; then
- subtext="cr ${carbsReq}g"
- else
- subtext="e${enactTime}"
+ title="${bgNow} ${tick} ${direction} @ ${curTime}"
+ text="IOB ${iob}, COB ${cob}"
+ if cat $FILE | egrep "add'l" >/dev/null; then
+ carbsMsg="${carbsReq}g req "
fi
- text="${bgNow}${direction}"
- title="cob ${cob}, iob ${iob}"
+ subtext="$carbsMsg${rate}U/h ${duration}m"
# echo "pushover glance text=${text} subtext=${subtext} delta=${delta} title=${title} battery percent=${battery}"
- curl -s -F "token=$TOKEN" -F "user=$USER" -F "text=${text}" -F "subtext=${subtext}" -F "count=$bgNow" -F "percent=${battery}" -F "title=${title}" https://api.pushover.net/1/glances.json
+ curl --compressed -s -F "token=$TOKEN" -F "user=$USER" -F "text=${text}" -F "subtext=${subtext}" -F "count=$bgNow" -F "percent=${battery}" -F "title=${title}" https://api.pushover.net/1/glances.json | jq .status| grep 1 >/dev/null && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"glance"}' > /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json >/dev/null && echo "Glance uploaded and snoozed"
touch $GLANCES
+ else
+ echo -n "Pushover glance last updated less than $glanceDelay minutes ago @ "
+ ls -la $GLANCES | awk '{print $8}'
fi
fi
@@ -174,7 +211,7 @@ fi
# call with this event that will read out in human language the additional carbs and other
# vital facts. It will leave a voice mail if not answered.
-if [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then
+if ! [ -z "$MAKER_KEY" ] && [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then
if file_is_recent monitor/ifttt-sent 60; then
echo "carbsReq=${carbsReq} but last IFTTT event sent less than 60 minutes ago."
else
@@ -186,7 +223,7 @@ if [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then
echo $values > $ifttt
- curl --request POST \
+ curl --compressed --request POST \
--header 'Content-Type: application/json' \
-d @$ifttt \
https://maker.ifttt.com/trigger/carbs-required/with/key/${key} && touch monitor/ifttt-sent
diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh
index 71d9d4921..1c098cd8c 100755
--- a/bin/oref0-setup.sh
+++ b/bin/oref0-setup.sh
@@ -114,6 +114,9 @@ case $i in
;;
-npm=*|--npm_install=*)
npm_option="${i#*=}"
+ ;;
+ --hotspot=*)
+ hotspot_option="${i#*=}"
shift
;;
*)
@@ -282,13 +285,73 @@ function copy_go_binaries () {
function move_mmtune () {
request_stop_local_binary Go-mmtune
- if [ -f /usr/local/bin/mmtune ]; then
- mv /usr/local/bin/mmtune /usr/local/bin/Go-mmtune || die "Couldn't move mmtune to Go-mmtune"
+ if [ -f /root/go/bin/mmtune ]; then
+ ln -s /root/go/bin/mmtune /usr/local/bin/Go-mmtune
else
- die "Couldn't move_mmtune() because /usr/local/bin/mmtune exists"
+ echo "Couldn't move_mmtune()"
fi
}
+function check_nodejs_timing () {
+ # Redundant check that node is installed
+ # It is installed as part of openaps-packages.sh
+ if ! node --version | grep -q -e 'v[89]\.' -e 'v1[[:digit:]]\.'; then
+ die "No version of node (>=8,<=19) was found, which is an unexpected error (node installation should have been handled by previous installation steps)"
+ fi
+
+ # Check that the nodejs you have installed is not broken. In particular, we're
+ # checking for a problem with nodejs binaries that are present in the apt-get
+ # repo for RaspiOS builds from mid-2021 and earlier, where the node interpreter
+ # works, but has a 10x slower startup than expected (~30s on Pi Zero W
+ # hardware, as opposed to ~3s using a statically-linked binary of the same
+ # binary sourced from nvm).
+ sudo apt-get install -y time
+ NODE_EXECUTION_TIME="$(\time --format %e node -e 'true' 2>&1)"
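+ # flag the installed node as broken if a no-op invocation takes more than 10 seconds to start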
+ if [ 1 -eq "$(echo "$NODE_EXECUTION_TIME > 10" |bc)" ]; then
+ echo "Your installed nodejs ($(node --version)) is very slow to start (took ${NODE_EXECUTION_TIME}s)"
+ echo "This is a known problem with certain versions of Raspberry Pi OS."
+
+ if prompt_yn "Confirm installation of replacement nodejs/npm versions?" Y; then
+ echo "Attempting to uninstall current nodejs/npm versions (apt-get remove)"
+ sudo apt-get -y remove nodejs npm
+ if [[ -n $NVM_DIR ]]; then
+ echo "Removing nvm ($NVM_DIR)..."
+ echo "(you may wish to optionally remove the nvm-related lines that still exist in ~/.bashrc; this script won't do it for you)"
+ rm -rf "$NVM_DIR"
+ fi
+
+ # Check that node and npm are no longer available. If they are, warn the user.
+ nodePath=$(command -v node)
+ npmPath=$(command -v npm)
+ if [[ -e "$nodePath" ]]; then
+ echo "Note: A 'node' binary (located at '$nodePath') still exists and may interfere with the new installation of node"
+ fi
+ if [[ -e "$npmPath" ]]; then
+ echo "Note: A 'npm' binary (located at '$npmPath') still exists and may interfere with the new installation of npm"
+ fi
+
+ if [[ $(command -v n) ]]; then
+ echo "n already exists on the system, using it to install a new version of node..."
+ sudo n current
+ else
+ echo "Installing n and using it to replace the system-provided nodejs"
+ echo "Installing node via n..."
+ curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n
+ # Install the latest version of node that is supported on this platform
+ sudo bash n current
+ # Delete the local n binary used to bootstrap the install
+ rm n
+ # Install n globally
+ sudo npm install -g n
+ fi
+
+ NEW_NODE_EXECUTION_TIME="$(\time --format %e node -e 'true' 2>&1)"
+ echo "New nodejs took ${NEW_NODE_EXECUTION_TIME}s to start"
+ fi
+ else
+ echo "Your installed nodejs version is OK."
+ fi
+}
if ! validate_cgm "${CGM}"; then
DIR="" # to force a Usage prompt
@@ -471,6 +534,11 @@ if [[ -z "$DIR" || -z "$serial" ]]; then
echo
fi
+ if prompt_yn "Do you want to be able to set up a local-only wifi hotspot for offline monitoring?" N; then
+ HOTSPOT=true
+ else
+ HOTSPOT=false
+ fi
if [[ ! -z $BT_PEB ]]; then
prompt_and_validate BT_PEB "For Pancreabble enter Pebble mac id (i.e. AA:BB:CC:DD:EE:FF) hit enter to skip" validate_bt_peb
@@ -626,6 +694,9 @@ fi
if [[ ! -z "$radiotags" ]]; then
echo -n " --radiotags='$radiotags'" | tee -a $OREF0_RUNAGAIN
fi
+if [[ ! -z "$hotspot_option" ]]; then
+ echo -n " --hotspot='$hotspot_option'" | tee -a $OREF0_RUNAGAIN
+fi
echo; echo | tee -a $OREF0_RUNAGAIN
chmod 755 $OREF0_RUNAGAIN
@@ -678,20 +749,14 @@ if prompt_yn "" N; then
echo Running apt-get autoclean
sudo apt-get autoclean
- # install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed
- if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' ; then
- echo Installing node 8
- # Use nodesource setup script to add nodesource repository to sources.list.d
- sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8"
- # Install nodejs and npm from nodesource
- sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs"
- fi
+ check_nodejs_timing
# Attempting to remove git to make install --nogit by default for existing users
echo Removing any existing git in $directory/.git
rm -rf $directory/.git
echo Removed any existing git
-
+ echo "Uninstalling parsedatetime, reinstalling correct version"
+ pip uninstall -y parsedatetime && pip install -I parsedatetime==2.5
# TODO: delete this after openaps 0.2.2 release
echo Checking openaps 0.2.2 installation with --nogit support
if ! openaps --version 2>&1 | egrep "0.[2-9].[2-9]"; then
@@ -730,10 +795,14 @@ if prompt_yn "" N; then
mkdir -p $HOME/src/
if [ -d "$HOME/src/oref0/" ]; then
echo "$HOME/src/oref0/ already exists; pulling latest"
- (cd $HOME/src/oref0 && git fetch && git pull) || die "Couldn't pull latest oref0"
+ (cd $HOME/src/oref0 && git fetch && git pull) || (
+ if ! prompt_yn "Couldn't pull latest oref0. Continue anyways?"; then
+ die "Failed to update oref0."
+ fi
+ )
else
echo -n "Cloning oref0: "
- (cd $HOME/src && git clone git://github.com/openaps/oref0.git) || die "Couldn't clone oref0"
+ (cd $HOME/src && git clone https://github.com/openaps/oref0.git) || die "Couldn't clone oref0"
fi
# Make sure jq version >1.5 is installed
@@ -746,11 +815,11 @@ if prompt_yn "" N; then
echo Checking oref0 installation
cd $HOME/src/oref0
if git branch | grep "* master"; then
- npm list -g --depth=0 | egrep oref0@0.6.[0] || (echo Installing latest oref0 package && sudo npm install -g oref0)
+ npm list -g --depth=0 | egrep oref0@0.7.[0] || (echo Installing latest oref0 package && sudo npm install -g oref0)
elif [[ ${npm_option,,} == "force" ]]; then
echo Forcing install of latest oref0 from $HOME/src/oref0/ && cd $HOME/src/oref0/ && npm run global-install
else
- npm list -g --depth=0 | egrep oref0@0.6.[1-9] || (echo Installing latest oref0 from $HOME/src/oref0/ && cd $HOME/src/oref0/ && npm run global-install)
+ npm list -g --depth=0 | egrep oref0@0.7.[1-9] || (echo Installing latest oref0 from $HOME/src/oref0/ && cd $HOME/src/oref0/ && npm run global-install)
fi
cd $directory || die "Can't cd $directory"
@@ -913,33 +982,37 @@ if prompt_yn "" N; then
else
echo bluez version ${bluetoothdversion} already installed
fi
- echo Installing prerequisites and configs for local-only hotspot
- apt-get install -y hostapd dnsmasq || die "Couldn't install hostapd dnsmasq"
- test ! -f /etc/dnsmasq.conf.bak && mv /etc/dnsmasq.conf /etc/dnsmasq.conf.bak
- cp $HOME/src/oref0/headless/dnsmasq.conf /etc/dnsmasq.conf || die "Couldn't copy dnsmasq.conf"
- test ! -f /etc/hostapd/hostapd.conf.bak && mv /etc/hostapd/hostapd.conf /etc/hostapd/hostapd.conf.bak
- cp $HOME/src/oref0/headless/hostapd.conf /etc/hostapd/hostapd.conf || die "Couldn't copy hostapd.conf"
- sed -i.bak -e "s|DAEMON_CONF=$|DAEMON_CONF=/etc/hostapd/hostapd.conf|g" /etc/init.d/hostapd
- cp $HOME/src/oref0/headless/interfaces.ap /etc/network/ || die "Couldn't copy interfaces.ap"
- cp /etc/network/interfaces /etc/network/interfaces.client || die "Couldn't copy interfaces.client"
- if [ ! -z "$BT_MAC" ]; then
- printf 'Checking for the bnep0 interface in the interfaces.client file and adding if missing...'
- # Make sure the bnep0 interface is in the /etc/networking/interface
- (grep -qa bnep0 /etc/network/interfaces.client && printf 'skipped.\n') || (printf '\n%s\n\n' "iface bnep0 inet dhcp" >> /etc/network/interfaces.client && printf 'added.\n')
- fi
- #Stop automatic startup of hostapd & dnsmasq
- update-rc.d -f hostapd remove
- update-rc.d -f dnsmasq remove
- # Edit /etc/hostapd/hostapd.conf for wifi using Hostname
- sed -i.bak -e "s/ssid=OpenAPS/ssid=${HOSTNAME}/" /etc/hostapd/hostapd.conf
- # Add Commands to /etc/rc.local
- # Interrupt Kernel Messages
- if ! grep -q 'sudo dmesg -n 1' /etc/rc.local; then
- sed -i.bak -e '$ i sudo dmesg -n 1' /etc/rc.local
- fi
- # Add to /etc/rc.local to check if in hotspot mode and turn back to client mode during bootup
- if ! grep -q 'cp /etc/network/interfaces.client /etc/network/interfaces' /etc/rc.local; then
- sed -i.bak -e "$ i if [ -f /etc/network/interfaces.client ]; then\n\tif grep -q '#wpa-' /etc/network/interfaces; then\n\t\tsudo ifdown wlan0\n\t\tsudo cp /etc/network/interfaces.client /etc/network/interfaces\n\t\tsudo ifup wlan0\n\tfi\nfi" /etc/rc.local || die "Couldn't modify /etc/rc.local"
+ if [[ ${hotspot_option,,} =~ "true" ]]; then
+ echo Installing prerequisites and configs for local-only hotspot
+ apt-get install -y hostapd dnsmasq || die "Couldn't install hostapd dnsmasq"
+ test ! -f /etc/dnsmasq.conf.bak && mv /etc/dnsmasq.conf /etc/dnsmasq.conf.bak
+ cp $HOME/src/oref0/headless/dnsmasq.conf /etc/dnsmasq.conf || die "Couldn't copy dnsmasq.conf"
+ test ! -f /etc/hostapd/hostapd.conf.bak && mv /etc/hostapd/hostapd.conf /etc/hostapd/hostapd.conf.bak
+ cp $HOME/src/oref0/headless/hostapd.conf /etc/hostapd/hostapd.conf || die "Couldn't copy hostapd.conf"
+ sed -i.bak -e "s|DAEMON_CONF=$|DAEMON_CONF=/etc/hostapd/hostapd.conf|g" /etc/init.d/hostapd
+ cp $HOME/src/oref0/headless/interfaces.ap /etc/network/ || die "Couldn't copy interfaces.ap"
+ cp /etc/network/interfaces /etc/network/interfaces.client || die "Couldn't copy interfaces.client"
+ if [ ! -z "$BT_MAC" ]; then
+ printf 'Checking for the bnep0 interface in the interfaces.client file and adding if missing...'
+ # Make sure the bnep0 interface is in the /etc/networking/interface
+ (grep -qa bnep0 /etc/network/interfaces.client && printf 'skipped.\n') || (printf '\n%s\n\n' "iface bnep0 inet dhcp" >> /etc/network/interfaces.client && printf 'added.\n')
+ fi
+ #Stop automatic startup of hostapd & dnsmasq
+ update-rc.d -f hostapd remove
+ update-rc.d -f dnsmasq remove
+ # Edit /etc/hostapd/hostapd.conf for wifi using Hostname
+ sed -i.bak -e "s/ssid=OpenAPS/ssid=${HOSTNAME}/" /etc/hostapd/hostapd.conf
+ # Add Commands to /etc/rc.local
+ # Interrupt Kernel Messages
+ if ! grep -q 'sudo dmesg -n 1' /etc/rc.local; then
+ sed -i.bak -e '$ i sudo dmesg -n 1' /etc/rc.local
+ fi
+ # Add to /etc/rc.local to check if in hotspot mode and turn back to client mode during bootup
+ if ! grep -q 'cp /etc/network/interfaces.client /etc/network/interfaces' /etc/rc.local; then
+ sed -i.bak -e "$ i if [ -f /etc/network/interfaces.client ]; then\n\tif grep -q '#wpa-' /etc/network/interfaces; then\n\t\tsudo ifdown wlan0\n\t\tsudo cp /etc/network/interfaces.client /etc/network/interfaces\n\t\tsudo ifup wlan0\n\tfi\nfi" /etc/rc.local || die "Couldn't modify /etc/rc.local"
+ fi
+ else
+ echo Skipping local-only hotspot
fi
fi
@@ -1015,7 +1088,7 @@ if prompt_yn "" N; then
echo "EdisonVoltage already installed"
else
echo "Installing EdisonVoltage"
- cd $HOME/src && git clone -b master git://github.com/cjo20/EdisonVoltage.git || (cd EdisonVoltage && git checkout master && git pull)
+ cd $HOME/src && git clone -b master https://github.com/cjo20/EdisonVoltage.git || (cd EdisonVoltage && git checkout master && git pull)
cd $HOME/src/EdisonVoltage
make voltage
fi
@@ -1030,7 +1103,7 @@ if prompt_yn "" N; then
echo Checking for BT Pebble Mac
if [[ ! -z "$BT_PEB" ]]; then
sudo pip install --default-timeout=1000 libpebble2
- sudo pip install --default-timeout=1000 --user git+git://github.com/mddub/pancreabble.git
+ sudo pip install --default-timeout=1000 --user git+https://github.com/mddub/pancreabble.git
oref0-bluetoothup
sudo rfcomm bind hci0 $BT_PEB
do_openaps_import $HOME/src/oref0/lib/oref0-setup/pancreabble.json
@@ -1047,6 +1120,9 @@ if prompt_yn "" N; then
#Moved this out of the conditional, so that x12 models will work with smb loops
sudo apt-get -y install bc ntpdate bash-completion || die "Couldn't install bc etc."
+ # now required on all platforms for shared-node
+ echo "Installing socat and ntp..."
+ apt-get install -y socat ntp
cd $directory || die "Can't cd $directory"
do_openaps_import $HOME/src/oref0/lib/oref0-setup/supermicrobolus.json
@@ -1074,12 +1150,16 @@ if prompt_yn "" N; then
if [[ -f $HOME/.profile ]]; then
sed --in-place '/.*API_SECRET.*/d' $HOME/.profile
sed --in-place '/.*NIGHTSCOUT_HOST.*/d' $HOME/.profile
+ sed --in-place '/.*MEDTRONIC_PUMP_ID.*/d' $HOME/.profile
+ sed --in-place '/.*MEDTRONIC_FREQUENCY.*/d' $HOME/.profile
fi
# Delete old copies of variables before replacing them
sed --in-place '/.*NIGHTSCOUT_HOST.*/d' $HOME/.bash_profile
sed --in-place '/.*API_SECRET.*/d' $HOME/.bash_profile
sed --in-place '/.*DEXCOM_CGM_RECV_ID*/d' $HOME/.bash_profile
+ sed --in-place '/.*MEDTRONIC_PUMP_ID.*/d' $HOME/.bash_profile
+ sed --in-place '/.*MEDTRONIC_FREQUENCY.*/d' $HOME/.bash_profile
#sed --in-place '/.*DEXCOM_CGM_TX_ID*/d' $HOME/.bash_profile
# Then append the variables
@@ -1089,9 +1169,11 @@ if prompt_yn "" N; then
echo "export API_SECRET" >> $HOME/.bash_profile
echo DEXCOM_CGM_RECV_ID="$BLE_SERIAL" >> $HOME/.bash_profile
echo "export DEXCOM_CGM_RECV_ID" >> $HOME/.bash_profile
+ echo MEDTRONIC_PUMP_ID="$serial" >> $HOME/.bash_profile
+ echo MEDTRONIC_FREQUENCY='`cat $HOME/myopenaps/monitor/medtronic_frequency.ini`' >> $HOME/.bash_profile
+
#echo DEXCOM_CGM_TX_ID="$DEXCOM_CGM_TX_ID" >> $HOME/.bash_profile
#echo "export DEXCOM_CGM_TX_ID" >> $HOME/.bash_profile
- echo
#Turn on i2c, install pi-buttons, and openaps-menu for hardware that has a screen and buttons (so far, only Explorer HAT and Radiofruit Bonnet)
if grep -qa "Explorer HAT" /proc/device-tree/hat/product &> /dev/null || [[ "$hardwaretype" =~ "explorer-hat" ]] || [[ "$hardwaretype" =~ "radiofruit" ]]; then
@@ -1104,11 +1186,9 @@ if prompt_yn "" N; then
sed -i.bak -e "s/#dtparam=i2c_arm=on/dtparam=i2c_arm=on/" /boot/config.txt
egrep "^dtparam=i2c1=on" /boot/config.txt || echo "dtparam=i2c1=on,i2c1_baudrate=400000" >> /boot/config.txt
echo "i2c-dev" > /etc/modules-load.d/i2c.conf
- echo "Installing socat and ntp..."
- apt-get install -y socat ntp
echo "Installing pi-buttons..."
systemctl stop pi-buttons
- cd $HOME/src && git clone git://github.com/bnielsen1965/pi-buttons.git
+ cd $HOME/src && git clone https://github.com/bnielsen1965/pi-buttons.git
echo "Make and install pi-buttons..."
cd pi-buttons
cd src && make && sudo make install && sudo make install_service
@@ -1119,7 +1199,7 @@ if prompt_yn "" N; then
systemctl enable pi-buttons && systemctl restart pi-buttons
echo "Installing openaps-menu..."
test "$directory" != "/$HOME/myopenaps" && (echo You are using a non-standard openaps directory. For the statusmenu to work correctly you need to set the openapsDir variable in index.js)
- cd $HOME/src && git clone git://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull)
+ cd $HOME/src && git clone https://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull)
cd $HOME/src/openaps-menu && sudo npm install
cp $HOME/src/openaps-menu/openaps-menu.service /etc/systemd/system/ && systemctl enable openaps-menu
fi
@@ -1130,7 +1210,7 @@ if prompt_yn "" N; then
# Install Golang
mkdir -p $HOME/go
source $HOME/.bash_profile
- golangversion=1.12.5
+ golangversion=1.19.1
if go version | grep go${golangversion}.; then
echo Go already installed
else
@@ -1139,6 +1219,8 @@ if prompt_yn "" N; then
echo "Installing Golang..."
if uname -m | grep armv; then
cd /tmp && wget -c https://storage.googleapis.com/golang/go${golangversion}.linux-armv6l.tar.gz && tar -C /usr/local -xzvf /tmp/go${golangversion}.linux-armv6l.tar.gz
+ elif uname -m | grep aarch64; then
+ cd /tmp && wget -c https://storage.googleapis.com/golang/go${golangversion}.linux-arm64.tar.gz && tar -C /usr/local -xzvf /tmp/go${golangversion}.linux-arm64.tar.gz
elif uname -m | grep i686; then
cd /tmp && wget -c https://dl.google.com/go/go${golangversion}.linux-386.tar.gz && tar -C /usr/local -xzvf /tmp/go${golangversion}.linux-386.tar.gz
fi
@@ -1152,7 +1234,7 @@ if prompt_yn "" N; then
sed --in-place '/.*GOPATH*/d' $HOME/.bash_profile
echo 'GOPATH=$HOME/go' >> $HOME/.bash_profile
echo 'export GOPATH' >> $HOME/.bash_profile
- echo 'PATH=$PATH:/usr/local/go/bin:$GOROOT/bin:$GOPATH/bin' >> $HOME/.bash_profile
+ echo 'PATH=$PATH:/usr/local/go/bin:$GOROOT/bin:$GOPATH/bin:/root/go/bin/' >> $HOME/.bash_profile
sed --in-place '/.*export PATH*/d' $HOME/.bash_profile
echo 'export PATH' >> $HOME/.bash_profile
fi
@@ -1185,8 +1267,14 @@ if prompt_yn "" N; then
esac
#Build Go binaries
- go get -u -v -tags "$radiotags" github.com/ecc1/medtronic/... || die "Couldn't go get medtronic"
- ln -sf $HOME/go/src/github.com/ecc1/medtronic/cmd/pumphistory/openaps.jq $directory/ || die "Couldn't softlink openaps.jq"
+ #go get -u -v -tags "$radiotags" github.com/ecc1/medtronic/... || die "Couldn't go get medtronic"
+ go install -v -tags "$radiotags" github.com/ecc1/medtronic/cmd/...@latest || die "Couldn't go get medtronic"
+ #ln -sf /root/go/pkg/mod/github.com/ecc1/medtronic@v0.0.0-20210712211734-b8431dc5211b/cmd/pumphistory/openaps.jq $directory/ || die "Couldn't softlink openaps.jq"
+ if [[ -f $directory/openaps.jq ]]; then
+ ls -la $directory/openaps.jq
+ else
+ cd $directory && wget https://raw.githubusercontent.com/ecc1/medtronic/master/cmd/pumphistory/openaps.jq || die "Couldn't wget openaps.jq"
+ fi
else
#TODO: write validate_ttyport and support non-SPI ports
die "Unsupported ttyport. Exiting."
diff --git a/bin/oref0-shared-node-loop.sh b/bin/oref0-shared-node-loop.sh
new file mode 100755
index 000000000..66f60513f
--- /dev/null
+++ b/bin/oref0-shared-node-loop.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+
+source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1)
+
+# Shared node loop.
+main() {
+ echo
+ echo Starting Shared-Node-loop at $(date):
+ while true; do
+
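+ # assumption: this loop is started from the openaps directory, so ../src/oref0 points at the oref0 source checkout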
+ node ../src/oref0/bin/oref0-shared-node.js
+ echo Tough luck, shared node crashed. Starting it again at $(date)
+ done
+}
+
+usage "$@" < 0) {
+ final_result += '\n';
+ }
+ var len = theArgs.length;
+ for (var i = 0 ; i < len; i++) {
+ if (typeof theArgs[i] != 'object') {
+ final_result += theArgs[i];
+ } else {
+ final_result += JSON.stringify(theArgs[i]);
+ }
+ if(i != len -1 ) {
+ final_result += ' ';
+ }
+
+ }
+ return final_result;
+}
+
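+// console_error and console_log append one newline-separated entry to final_result.err / final_result.stdout.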
+var console_error = function console_error(final_result, ...theArgs) {
+ final_result.err = console_both(final_result.err, theArgs);
+}
+
+var console_log = function console_log(final_result, ...theArgs) {
+ final_result.stdout = console_both(final_result.stdout, theArgs);
+}
+
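+// Record the exit status in final_result instead of terminating the (shared) node process.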
+var process_exit = function process_exit(final_result, ret) {
+ final_result.return_val = ret;
+}
+
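+// Fresh result object: accumulated stdout, accumulated stderr, and the exit status to report.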
+var initFinalResults = function initFinalResults() {
+ var final_result = {
+ stdout: ''
+ , err: ''
+ , return_val : 0
+ };
+ return final_result;
+}
+
+
+
+module.exports = {
+ console_log : console_log,
+ console_error : console_error,
+ process_exit : process_exit,
+ initFinalResults : initFinalResults
+}
\ No newline at end of file
diff --git a/bin/oref0-shared-node.js b/bin/oref0-shared-node.js
new file mode 100644
index 000000000..2110cc737
--- /dev/null
+++ b/bin/oref0-shared-node.js
@@ -0,0 +1,297 @@
+#!/usr/bin/env node
+
+'use strict';
+
+var os = require("os");
+var ns_status = require("./ns-status");
+var oref0_normalize_temps = require("./oref0-normalize-temps");
+var oref0_calculate_iob = require("./oref0-calculate-iob");
+var oref0_meal = require("./oref0-meal");
+var oref0_get_profile = require("./oref0-get-profile");
+var oref0_get_ns_entries = require("./oref0-get-ns-entries");
+var fs = require('fs');
+var requireUtils = require('../lib/require-utils');
+var shared_node_utils = require('./oref0-shared-node-utils');
+var console_error = shared_node_utils.console_error;
+var console_log = shared_node_utils.console_log;
+var initFinalResults = shared_node_utils.initFinalResults;
+
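+// Wrap a completed command's stdout and exit status in the {err, stdout, return_val} shape sent back to clients.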
+function createRetVal(stdout, return_val) {
+ var returnObj = {
+ err: "",
+ stdout: stdout,
+ return_val: return_val
+ }
+ return returnObj;
+}
+
+function serverListen() {
+
+ const net = require('net');
+ const fs = require('fs');
+ const unixSocketServer = net.createServer({
+ allowHalfOpen: true
+ });
+
+ var socketPath = '/tmp/oaps_shared_node';
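+ // Each request arrives as a single command line on this Unix socket; the reply is the JSON-encoded final_result.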
+ try {
+ fs.unlinkSync(socketPath);
+ } catch (err) {
+ if (err.code == 'ENOENT') {
+ // Intentionally ignored.
+ } else {
+ throw err;
+ }
+ }
+ unixSocketServer.listen(socketPath, () => {
+ console.log('now listening');
+ });
+
+ unixSocketServer.on('end', function() {
+ console.log("server 2 disconnected from port");
+ });
+
+ unixSocketServer.on('connection', (s) => {
+ console.log('got connection!');
+ s.allowHalfOpen = true;
+ s.on('end', function() {
+ console.log("server 2 disconnected from port");
+ });
+
+ s.on('error', function(err) {
+ console.log("there was an error in the client and the error is: " + err.code);
+ });
+
+ s.on("data", function(data) {
+ //... do stuff with the data ...
+ console.log('read data', data.toString());
+
+ // Split by space except for inside quotes
+ // (https://stackoverflow.com/questions/16261635/javascript-split-string-by-space-but-ignore-space-in-quotes-notice-not-to-spli)
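+ // e.g. 'json -f file.json -c "this.glucose > 38"' -> ['json', '-f', 'file.json', '-c', 'this.glucose > 38']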
+ var command = data.toString().match(/\\?.|^$/g).reduce((p, c) => {
+ if (c === '"') {
+ p.quote ^= 1;
+ } else if (!p.quote && c === ' ') {
+ p.a.push('');
+ } else {
+ p.a[p.a.length - 1] += c.replace(/\\(.)/, "$1");
+ }
+ return p;
+ }, {
+ a: ['']
+ }).a;
+
+ command = command.map(arg => arg.trim());
+
+ var result = 'unknown command\n';
+
+ console.log('command = ', command);
+ var async_command = false;
+ var final_result = initFinalResults();
+
+ if (command[0] == 'ns-status') {
+ // remove the first parameter.
+ command.shift();
+ try {
+ result = ns_status(command);
+ result = addNewlToResult(result);
+ final_result = createRetVal(result, 0);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when parsing ns_status ', err);
+ console_error(final_result, 'exception when parsing ns_status ', err);
+ }
+ } else if (command[0] == 'oref0-normalize-temps') {
+ command.shift();
+ try {
+ result = oref0_normalize_temps(command);
+ result = addNewlToResult(result);
+ final_result = createRetVal(result, 0);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when parsing oref0-normalize-temps ', err);
+ }
+ } else if (command[0] == 'oref0-calculate-iob') {
+ command.shift();
+ try {
+ result = oref0_calculate_iob(command);
+ result = addNewlToResult(result);
+ final_result = createRetVal(result, 0);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when parsing oref0-calculate-iob ', err);
+ }
+ } else if (command[0] == 'oref0-meal') {
+ command.shift();
+ try {
+ result = oref0_meal(final_result, command);
+ final_result.stdout = addNewlToResult(final_result.stdout); // TODO: factor these two calls into a helper function
+ final_result.err = addNewlToResult(final_result.err);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when parsing oref0-meal ', err);
+ }
+ } else if (command[0] == 'oref0-get-profile') {
+ command.shift();
+ try {
+ oref0_get_profile(final_result, command);
+ final_result.stdout = addNewlToResult(final_result.stdout); // TODO: factor these two calls into a helper function
+ final_result.err = addNewlToResult(final_result.err);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when parsing oref0-get-profile ', err);
+ }
+ } else if (command[0] == 'oref0-get-ns-entries') {
+ async_command = true;
+
+ var final_result = initFinalResults();
+ function print_callback(final_result) {
+ try {
+ final_result.stdout = addNewlToResult(final_result.stdout); // TODO: factor these two calls into a helper function
+ final_result.err = addNewlToResult(final_result.err);
+ s.write(JSON.stringify(final_result));
+ s.end();
+ } catch (err) {
+ // Assume the error happened while handling the socket, so don't try to close it
+ console.log('exception in print_callback ', err);
+ }
+ }
+ command.shift();
+ try {
+ result = oref0_get_ns_entries(command, print_callback, final_result);
+ result = addNewlToResult(result);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when parsing oref0-get-ns-entries ', err);
+ }
+ } else if (command[0] == 'ping') {
+ result = 'pong';
+ final_result = createRetVal(result, 0);
+ } else if (command[0] == 'json') {
+ // remove the first parameter.
+ command.shift();
+ try {
+ var return_val;
+ [result, return_val] = jsonWrapper(command);
+ result = addNewlToResult(result);
+ final_result = createRetVal(result, return_val);
+ } catch (err) {
+ final_result.return_val = 1;
+ console.log('exception when running jsonWrapper ', err);
+ }
+ } else {
+ console.error('Unknown command = ', command);
+ console_error(final_result, 'Unknown command = ', command);
+ final_result.return_val = 1;
+ }
+ if(!async_command) {
+ s.write(JSON.stringify(final_result));
+ s.end();
+ }
+ });
+ });
+}
+
+/**
+ * Return a function for the given JS code that returns a value.
+ *
+ * If no 'return' in the given javascript snippet, then assume we are a single
+ * statement and wrap in 'return (...)'. This is for convenience for short
+ * '-c ...' snippets.
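+ * For example, 'this.glucose > 38' becomes 'return (this.glucose > 38)'.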
+ */
+function funcWithReturnFromSnippet(js) {
+ // auto-"return"
+ if (js.indexOf('return') === -1) {
+ if (js.substring(js.length - 1) === ';') {
+ js = js.substring(0, js.length - 1);
+ }
+ js = 'return (' + js + ')';
+ }
+ return (new Function(js));
+}
+
+
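+// Append a trailing newline to non-empty output and map undefined to an empty string.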
+function addNewlToResult(result) {
+ if (result === undefined) {
+ // This preserves the oref0_normalize_temps behavior.
+ result = ""
+ } else if (result.length != 0) {
+ result += "\n";
+ }
+ return result;
+}
+
+// The goal is to run something like:
+// json -f monitor/status.1.json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38"
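+// Returns an [output, return_val] pair: the filtered entries as pretty-printed JSON and 0
+// on success, or a falsy output and 1 on error.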
+function jsonWrapper(argv_params) {
+ var argv = require('yargs')(argv_params)
+ .usage('$0 json -f monitor/status.1.json -c \"minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38\"')
+ .option('input_file', {
+ alias: 'f',
+ nargs: 1,
+ describe: "Input/Output file",
+ default: false
+ })
+ .option('filtering_code', {
+ alias: 'c',
+ nargs: 1,
+ describe: "Conditional filtering",
+ default: false
+ })
+ .strict(true)
+ .fail(function(msg, err, yargs) {
+ if (err) {
+ return [console.error('Error found', err), 1];
+ }
+ return [console.error('Parsing of command arguments failed', msg), 1];
+ })
+ .help('help');
+ var params = argv.argv;
+ var inputs = params._;
+ if (inputs.length > 0) {
+ return [console.error('Error: too many input parameters.'), 1];
+ }
+ if (!params.input_file) {
+ return [console.error('Error: No input file.'), 1];
+ }
+ if (!params.filtering_code) {
+ return [console.error('Error: No filtering_code'), 1];
+ }
+
+ var data = requireUtils.safeLoadFile(params.input_file);
+ if (!data) {
+ // file is empty. For such files, json returns nothing
+ console.error('Error: No data loaded');
+ return ["", 1];
+ }
+ if (!Array.isArray(data)) {
+ // file is not a JSON array; we do not handle this.
+ console.error('Error: data is not an array.');
+ return ["", 1];
+ }
+
+ var condFuncs = funcWithReturnFromSnippet(params.filtering_code);
+ var filtered = [];
+ for (var i = 0; i < data.length; i++) {
+ if (condFuncs.call(data[i])) {
+ filtered.push(data[i]);
+ }
+ }
+ return [JSON.stringify(filtered, null, 2), 0];
+}
+
+
+if (!module.parent) {
+ serverListen();
+}
+
+// Functions needed to simulate a stack node.
+const util = require('util');
+const vm = require('vm');
+
+function sleepFor(sleepDuration) {
+ var now = new Date().getTime();
+ while (new Date().getTime() < now + sleepDuration) {
+ /* do nothing */ }
+}
diff --git a/bin/oref0-simulator.sh b/bin/oref0-simulator.sh
index 405dfffb9..a5aa54020 100755
--- a/bin/oref0-simulator.sh
+++ b/bin/oref0-simulator.sh
@@ -27,7 +27,11 @@ function init {
function main {
# look up the currently active bg_target based on the current clock.json
- target=$((cat profile.json | jq -r '.bg_targets.targets[] | [.start, .min_bg] | @csv'; echo -n \"; cat clock.json | awk -F T '{print $2}') | sort | grep -B1 '\"$' | head -1 | awk -F , '{print $2}')
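+ # a fixed target_bg in profile.json overrides the scheduled bg_targets lookup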
+ if grep target_bg profile.json; then
+ target=$(jq .target_bg profile.json)
+ else
+ target=$((cat profile.json | jq -r '.bg_targets.targets[] | [.start, .min_bg] | @csv'; echo -n \"; cat clock.json | awk -F T '{print $2}') | sort | grep -B1 '\"$' | head -1 | awk -F , '{print $2}')
+ fi
if ! [ -z "$target" ]; then
cat profile.json | jq ". | .min_bg=$target | .max_bg=$target" > profile.json.new
echo setting target to $target
diff --git a/bin/oref0-upgrade.sh b/bin/oref0-upgrade.sh
new file mode 100755
index 000000000..7134951f5
--- /dev/null
+++ b/bin/oref0-upgrade.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1)
+
+usage "$@" <0:
- logging.error("The following permissions are missing in Nightscout: %s" % missing)
- logging.error("Please follow instructions at https://openaps.readthedocs.io/en/master/docs/walkthrough/phase-1/nightscout-setup.md#switching-from-api_secret-to-token-based-authentication-for-your-rig")
- sys.exit(1)
- logging.info("All permissions in Nightscout are ok")
+ logging.error("The following permissions are missing in Nightscout: %s" % missing)
+ logging.error("Please follow instructions at https://openaps.readthedocs.io/en/master/docs/walkthrough/phase-1/nightscout-setup.md#switching-from-api_secret-to-token-based-authentication-for-your-rig")
+ sys.exit(1)
+
+ logging.info("All permissions in Nightscout are ok")
if __name__ == '__main__':
diff --git a/examples/profile.json b/examples/profile.json
index a79a4119d..125c7e96f 100644
--- a/examples/profile.json
+++ b/examples/profile.json
@@ -63,6 +63,9 @@
"enableSMB_with_COB": true,
"enableSMB_with_temptarget": false,
"enableSMB_after_carbs": true,
+ "prime_indicates_pump_site_change": false,
+ "rewind_indicates_cartridge_change": false,
+ "battery_indicates_battery_change": false,
"maxSMBBasalMinutes": 75,
"curve": "rapid-acting",
"useCustomPeakTime": false,
diff --git a/lib/autotune-prep/categorize.js b/lib/autotune-prep/categorize.js
index bd824e876..4166bb5d0 100644
--- a/lib/autotune-prep/categorize.js
+++ b/lib/autotune-prep/categorize.js
@@ -1,3 +1,5 @@
+'use strict';
+
var tz = require('moment-timezone');
var basal = require('../profile/basal');
var getIOB = require('../iob');
@@ -122,6 +124,7 @@ function categorizeBGDatums(opts) {
var type="";
// main for loop
var fullHistory = IOBInputs.history;
+ var lastIsfResult = null;
for (i=bucketedData.length-5; i > 0; --i) {
glucoseDatum = bucketedData[i];
//console.error(glucoseDatum);
@@ -165,7 +168,8 @@ function categorizeBGDatums(opts) {
glucoseDatum.avgDelta = avgDelta;
//sens = ISF
- var sens = ISF.isfLookup(IOBInputs.profile.isfProfile,BGDate);
+ var sens;
+ [sens, lastIsfResult] = ISF.isfLookup(IOBInputs.profile.isfProfile, BGDate, lastIsfResult);
IOBInputs.clock=BGDate.toISOString();
// trim down IOBInputs.history to just the data for 6h prior to BGDate
//console.error(IOBInputs.history[0].created_at);
@@ -217,7 +221,7 @@ function categorizeBGDatums(opts) {
glucoseDatum.BGI = BGI;
// calculating deviation
var deviation = avgDelta-BGI;
- dev5m = delta-BGI;
+ var dev5m = delta-BGI;
//console.error(deviation,avgDelta,BG,bucketedData[i].glucose);
// set positive deviations to zero if BG is below 80
diff --git a/lib/bolus.js b/lib/bolus.js
index c7dc0f61d..b46d3fc5c 100644
--- a/lib/bolus.js
+++ b/lib/bolus.js
@@ -1,3 +1,4 @@
+'use strict';
function reduce (treatments) {
diff --git a/lib/determine-basal/autosens.js b/lib/determine-basal/autosens.js
index db2c86834..6a8b89a41 100644
--- a/lib/determine-basal/autosens.js
+++ b/lib/determine-basal/autosens.js
@@ -1,3 +1,5 @@
+'use strict';
+
var basal = require('../profile/basal');
var get_iob = require('../iob');
var find_insulin = require('../iob/history');
@@ -143,11 +145,12 @@ function detectSensitivity(inputs) {
var mealCarbs = 0;
var mealStartCounter = 999;
var type="";
+ var lastIsfResult = null;
//console.error(bucketed_data);
for (i=3; i < bucketed_data.length; ++i) {
bgTime = new Date(bucketed_data[i].date);
-
- var sens = isf.isfLookup(profile.isfProfile,bgTime);
+ var sens;
+ [sens, lastIsfResult] = isf.isfLookup(profile.isfProfile, bgTime, lastIsfResult);
//console.error(bgTime , bucketed_data[i].glucose);
var bg;
@@ -398,7 +401,7 @@ function detectSensitivity(inputs) {
} else {
console.error("Sensitivity normal.");
}
- ratio = 1 + (basalOff / profile.max_daily_basal);
+ var ratio = 1 + (basalOff / profile.max_daily_basal);
//console.error(basalOff, profile.max_daily_basal, ratio);
// don't adjust more than 1.2x by default (set in preferences.json)
@@ -411,7 +414,7 @@ function detectSensitivity(inputs) {
}
ratio = Math.round(ratio*100)/100;
- newisf = Math.round(profile.sens / ratio);
+ var newisf = Math.round(profile.sens / ratio);
//console.error(profile, newisf, ratio);
console.error("ISF adjusted from "+profile.sens+" to "+newisf);
//console.error("Basal adjustment "+basalOff.toFixed(2)+"U/hr");
diff --git a/lib/determine-basal/cob.js b/lib/determine-basal/cob.js
index 85f0522b9..903409ca6 100644
--- a/lib/determine-basal/cob.js
+++ b/lib/determine-basal/cob.js
@@ -1,3 +1,5 @@
+'use strict';
+
var basal = require('../profile/basal');
var get_iob = require('../iob');
var find_insulin = require('../iob/history');
@@ -12,7 +14,9 @@ function detectCarbAbsorption(inputs) {
});
var iob_inputs = inputs.iob_inputs;
var basalprofile = inputs.basalprofile;
- /* TODO why does declaring profile break tests-command-behavior.tests.sh? */ profile = inputs.iob_inputs.profile;
+ /* Declaring profile here used to break tests-command-behavior.tests.sh
+ because it was an implicit global variable used in other places. */
+ var profile = inputs.iob_inputs.profile;
var mealTime = new Date(inputs.mealTime);
var ciTime = new Date(inputs.ciTime);
@@ -114,10 +118,12 @@ function detectCarbAbsorption(inputs) {
var minDeviation = 999;
var allDeviations = [];
//console.error(bucketed_data);
+ var lastIsfResult = null;
for (i=0; i < bucketed_data.length-3; ++i) {
bgTime = new Date(bucketed_data[i].date);
- var sens = isf.isfLookup(profile.isfProfile,bgTime);
+ var sens;
+ [sens, lastIsfResult] = isf.isfLookup(profile.isfProfile, bgTime, lastIsfResult);
//console.error(bgTime , bucketed_data[i].glucose, bucketed_data[i].date);
var bg;
diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js
index dded5e57f..1a1a286cc 100644
--- a/lib/determine-basal/determine-basal.js
+++ b/lib/determine-basal/determine-basal.js
@@ -40,7 +40,7 @@ function convert_bg(value, profile)
{
if (profile.out_units === "mmol/L")
{
- return round(value / 18, 1).toFixed(1);
+ return round(value / 18, 1);
}
else
{
@@ -52,7 +52,9 @@ function enable_smb(
profile,
microBolusAllowed,
meal_data,
- target_bg
+ bg,
+ target_bg,
+ high_bg
) {
// disable SMB when a high temptarget is set
if (! microBolusAllowed) {
@@ -84,8 +86,8 @@ function enable_smb(
console.error("SMB enabled for COB of",meal_data.mealCOB);
}
return true;
- }
-
+ }
+
// enable SMB/UAM (if enabled in preferences) for a full 6 hours after any carb entry
// (6 hours is defined in carbWindow in lib/meal/total.js)
if (profile.enableSMB_after_carbs === true && meal_data.carbs ) {
@@ -96,7 +98,7 @@ function enable_smb(
}
return true;
}
-
+
// enable SMB/UAM (if enabled in preferences) if a low temptarget is set
if (profile.enableSMB_with_temptarget === true && (profile.temptargetSet && target_bg < 100)) {
if (meal_data.bwFound) {
@@ -105,8 +107,20 @@ function enable_smb(
console.error("SMB enabled for temptarget of",convert_bg(target_bg, profile));
}
return true;
- }
-
+ }
+
+ // enable SMB if high bg is found
+ if (profile.enableSMB_high_bg === true && high_bg !== null && bg >= high_bg) {
+ console.error("Checking BG to see if High for SMB enablement.");
+ console.error("Current BG", bg, " | High BG ", high_bg);
+ if (meal_data.bwFound) {
+ console.error("Warning: High BG SMB enabled within 6h of using Bolus Wizard: be sure to easy bolus 30s before using Bolus Wizard");
+ } else {
+ console.error("High BG detected. Enabling SMB.");
+ }
+ return true;
+ }
+
console.error("SMB disabled (no enableSMB preferences active or no condition satisfied)");
return false;
}
@@ -138,6 +152,20 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
var bg = glucose_status.glucose;
var noise = glucose_status.noise;
+// Prep various delta variables.
+ var tick;
+
+ if (glucose_status.delta > -0.5) {
+ tick = "+" + round(glucose_status.delta,0);
+ } else {
+ tick = round(glucose_status.delta,0);
+ }
+ //var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta);
+ var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta);
+ var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta);
+ var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta);
+
+
// Cancel high temps (and replace with neutral) or shorten long zero temps for various error conditions
// 38 is an xDrip error state that usually indicates sensor failure
@@ -201,12 +229,16 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
var target_bg;
var min_bg;
var max_bg;
+ var high_bg;
if (typeof profile.min_bg !== 'undefined') {
min_bg = profile.min_bg;
}
if (typeof profile.max_bg !== 'undefined') {
max_bg = profile.max_bg;
}
+ if (typeof profile.enableSMB_high_bg_target !== 'undefined') {
+ high_bg = profile.enableSMB_high_bg_target;
+ }
if (typeof profile.min_bg !== 'undefined' && typeof profile.max_bg !== 'undefined') {
target_bg = (profile.min_bg + profile.max_bg) / 2;
} else {
@@ -230,7 +262,15 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
// e.g.: Sensitivity ratio set to 0.8 based on temp target of 120; Adjusting basal from 1.65 to 1.35; ISF from 58.9 to 73.6
//sensitivityRatio = 2/(2+(target_bg-normalTarget)/40);
var c = halfBasalTarget - normalTarget;
- sensitivityRatio = c/(c+target_bg-normalTarget);
+ // a product less than or equal to 0 means we have a very low target together with a very low halfBasalTarget
+ // with a low TT and lowTTlowersSensitivity we need autosens_max as the fallback value
+ // we check the product instead of dividing to avoid a "division by zero" error
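+ // e.g. with half_basal_exercise_target 100 and normalTarget 100, c is 0 and the product is 0, so autosens_max is used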
+ if (c * (c + target_bg-normalTarget) <= 0.0) {
+ sensitivityRatio = profile.autosens_max;
+ }
+ else {
+ sensitivityRatio = c/(c+target_bg-normalTarget);
+ }
// limit sensitivityRatio to profile.autosens_max (1.2x by default)
sensitivityRatio = Math.min(sensitivityRatio, profile.autosens_max);
sensitivityRatio = round(sensitivityRatio,2);
@@ -270,38 +310,27 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
}
}
-// If iob_data or its required properties are missing, return.
-// This has to be checked after checking that we're not in one of the CGM-data-related error conditions handled above,
-// and before attempting to use iob_data below.
-// TODO: move this down to be just above // compare currenttemp to iob_data.lastTemp and cancel temp if they don't match
- if (typeof iob_data === 'undefined' ) {
- rT.error ='Error: iob_data undefined. ';
- return rT;
- }
-
- var iobArray = iob_data;
- if (typeof(iob_data.length) && iob_data.length > 1) {
- iob_data = iobArray[0];
- //console.error(JSON.stringify(iob_data[0]));
- }
-
- if (typeof iob_data.activity === 'undefined' || typeof iob_data.iob === 'undefined' ) {
- rT.error ='Error: iob_data missing some property. ';
- return rT;
+// Raise target for noisy / raw CGM data.
+ if (glucose_status.noise >= 2) {
+ // increase target at least 10% (default 30%) for raw / noisy data
+ var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier );
+ // don't allow maxRaw above 250
+ var maxRaw = Math.min( 250, profile.maxRaw );
+ var adjustedMinBG = round(Math.min(200, min_bg * noisyCGMTargetMultiplier ));
+ var adjustedTargetBG = round(Math.min(200, target_bg * noisyCGMTargetMultiplier ));
+ var adjustedMaxBG = round(Math.min(200, max_bg * noisyCGMTargetMultiplier ));
+ process.stderr.write("Raising target_bg for noisy / raw CGM data, from "+target_bg+" to "+adjustedTargetBG+"; ");
+ min_bg = adjustedMinBG;
+ target_bg = adjustedTargetBG;
+ max_bg = adjustedMaxBG;
}
-// Prep various delta variables. TODO: make this happen earlier along with other variable prep
- var tick;
+ // min_bg of 90 -> threshold of 65, 100 -> 70, 110 -> 75, and 130 -> 85
+ var threshold = min_bg - 0.5*(min_bg-40);
- if (glucose_status.delta > -0.5) {
- tick = "+" + round(glucose_status.delta,0);
- } else {
- tick = round(glucose_status.delta,0);
- }
- //var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta);
- var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta);
- var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta);
- var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta);
+// If iob_data or its required properties are missing, return.
+// This has to be checked after checking that we're not in one of the CGM-data-related error conditions handled above,
+// and before attempting to use iob_data below.
// Adjust ISF based on sensitivityRatio
var profile_sens = round(profile.sens,1)
@@ -318,6 +347,22 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
}
console.error("; CR:",profile.carb_ratio);
+ if (typeof iob_data === 'undefined' ) {
+ rT.error ='Error: iob_data undefined. ';
+ return rT;
+ }
+
+ var iobArray = iob_data;
+ if (typeof(iob_data.length) && iob_data.length > 1) {
+ iob_data = iobArray[0];
+ //console.error(JSON.stringify(iob_data[0]));
+ }
+
+ if (typeof iob_data.activity === 'undefined' || typeof iob_data.iob === 'undefined' ) {
+ rT.error ='Error: iob_data missing some property. ';
+ return rT;
+ }
+
// Compare currenttemp to iob_data.lastTemp and cancel temp if they don't match, as a safety check
// This should occur after checking that we're not in one of the CGM-data-related error conditions handled above,
// and before returning (doing nothing) below if eventualBG is undefined.
@@ -371,61 +416,11 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
// and adjust it for the deviation above
var eventualBG = naive_eventualBG + deviation;
-// Raise target for noisy / raw CGM data.
-// TODO: move this up to immediately after parsing targets above (once adv_target_adjustments is deprecated)
- if (glucose_status.noise >= 2) {
- // increase target at least 10% (default 30%) for raw / noisy data
- var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier );
- // don't allow maxRaw above 250
- var maxRaw = Math.min( 250, profile.maxRaw );
- var adjustedMinBG = round(Math.min(200, min_bg * noisyCGMTargetMultiplier ));
- var adjustedTargetBG = round(Math.min(200, target_bg * noisyCGMTargetMultiplier ));
- var adjustedMaxBG = round(Math.min(200, max_bg * noisyCGMTargetMultiplier ));
- process.stderr.write("Raising target_bg for noisy / raw CGM data, from "+target_bg+" to "+adjustedTargetBG+"; ");
- min_bg = adjustedMinBG;
- target_bg = adjustedTargetBG;
- max_bg = adjustedMaxBG;
- // adjust target BG range if configured to bring down high BG faster
- // TODO: deprecate this
- } else if ( bg > max_bg && profile.adv_target_adjustments && ! profile.temptargetSet ) {
- // with target=100, as BG rises from 100 to 160, adjustedTarget drops from 100 to 80
- adjustedMinBG = round(Math.max(80, min_bg - (bg - min_bg)/3 ),0);
- adjustedTargetBG =round( Math.max(80, target_bg - (bg - target_bg)/3 ),0);
- adjustedMaxBG = round(Math.max(80, max_bg - (bg - max_bg)/3 ),0);
- // if eventualBG, naive_eventualBG, and target_bg aren't all above adjustedMinBG, don’t use it
- //console.error("naive_eventualBG:",naive_eventualBG+", eventualBG:",eventualBG);
- if (eventualBG > adjustedMinBG && naive_eventualBG > adjustedMinBG && min_bg > adjustedMinBG) {
- process.stderr.write("Adjusting targets for high BG: min_bg from "+min_bg+" to "+adjustedMinBG+"; ");
- min_bg = adjustedMinBG;
- } else {
- process.stderr.write("min_bg unchanged: "+min_bg+"; ");
- }
- // if eventualBG, naive_eventualBG, and target_bg aren't all above adjustedTargetBG, don’t use it
- if (eventualBG > adjustedTargetBG && naive_eventualBG > adjustedTargetBG && target_bg > adjustedTargetBG) {
- process.stderr.write("target_bg from "+target_bg+" to "+adjustedTargetBG+"; ");
- target_bg = adjustedTargetBG;
- } else {
- process.stderr.write("target_bg unchanged: "+target_bg+"; ");
- }
- // if eventualBG, naive_eventualBG, and max_bg aren't all above adjustedMaxBG, don’t use it
- if (eventualBG > adjustedMaxBG && naive_eventualBG > adjustedMaxBG && max_bg > adjustedMaxBG) {
- console.error("max_bg from "+max_bg+" to "+adjustedMaxBG);
- max_bg = adjustedMaxBG;
- } else {
- console.error("max_bg unchanged: "+max_bg);
- }
- }
-
- // TODO: move this line to be 4 lines down
- var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi);
if (typeof eventualBG === 'undefined' || isNaN(eventualBG)) {
rT.error ='Error: could not calculate eventualBG. ';
return rT;
}
-
- // TODO: move this up to immediately after calculating targets
- // min_bg of 90 -> threshold of 65, 100 -> 70 110 -> 75, and 130 -> 85
- var threshold = min_bg - 0.5*(min_bg-40);
+ var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi);
//console.error(reservoir_data);
@@ -457,7 +452,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
profile,
microBolusAllowed,
meal_data,
- target_bg
+ bg,
+ target_bg,
+ high_bg
);
// enable UAM (if enabled in preferences)
@@ -477,7 +474,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
// use autosens-adjusted sens to counteract autosens meal insulin dosing adjustments so that
// autotuned CR is still in effect even when basals and ISF are being adjusted by TT or autosens
// this avoids overdosing insulin for large meals when low temp targets are active
- csf = sens / profile.carb_ratio;
+ var csf = sens / profile.carb_ratio;
console.error("profile.sens:",profile.sens,"sens:",sens,"CSF:",csf);
var maxCarbAbsorptionRate = 30; // g/h; maximum rate to assume carbs will absorb if no CI observed
@@ -698,7 +695,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
}
// set eventualBG based on COB or UAM predBGs
- rT.eventualBG = eventualBG;
+ rT.eventualBG = eventualBG; // FreeAPS-X requires this to be in mg/dL
}
console.error("UAM Impact:",uci,"mg/dL per 5m; UAM Duration:",UAMduration,"hours");
@@ -806,17 +803,17 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
rT.COB=meal_data.mealCOB;
rT.IOB=iob_data.iob;
- rT.BGI=bgi;
+ rT.BGI=convert_bg(bgi,profile);
rT.deviation=convert_bg(deviation, profile);
rT.ISF=convert_bg(sens, profile);
rT.CR=round(profile.carb_ratio, 2);
rT.target_bg=convert_bg(target_bg, profile);
- rT.reason="minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile);
+ rT.reason="COB: " + rT.COB + ", Dev: " + rT.deviation + ", BGI: " + rT.BGI+ ", ISF: " + rT.ISF + ", CR: " + rT.CR + ", minPredBG: " + convert_bg(minPredBG, profile) + ", minGuardBG: " + convert_bg(minGuardBG, profile) + ", IOBpredBG: " + convert_bg(lastIOBpredBG, profile);
if (lastCOBpredBG > 0) {
- rT.reason += ", COBpredBG " + convert_bg(lastCOBpredBG, profile);
+ rT.reason += ", COBpredBG: " + convert_bg(lastCOBpredBG, profile);
}
if (lastUAMpredBG > 0) {
- rT.reason += ", UAMpredBG " + convert_bg(lastUAMpredBG, profile)
+ rT.reason += ", UAMpredBG: " + convert_bg(lastUAMpredBG, profile)
}
rT.reason += "; ";
@@ -868,9 +865,17 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
enableSMB = false;
}
// Disable SMB for sudden rises (often caused by calibrations or activation/deactivation of Dexcom's noise-filtering algorithm)
- if ( maxDelta > 0.20 * bg ) {
- console.error("maxDelta",convert_bg(maxDelta, profile),"> 20% of BG",convert_bg(bg, profile),"- disabling SMB");
- rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > 20% of BG "+convert_bg(bg, profile)+": SMB disabled; ";
+ // Added maxDelta_bg_threshold as a hidden preference and included a cap at 0.3 as a safety limit
+ var maxDelta_bg_threshold;
+ if (typeof profile.maxDelta_bg_threshold === 'undefined') {
+ maxDelta_bg_threshold = 0.2;
+ } else {
+ maxDelta_bg_threshold = Math.min(profile.maxDelta_bg_threshold, 0.3);
+ }
+ if ( maxDelta > maxDelta_bg_threshold * bg ) {
+ console.error("maxDelta "+convert_bg(maxDelta, profile)+" > "+100 * maxDelta_bg_threshold +"% of BG "+convert_bg(bg, profile)+" - disabling SMB");
+ rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > "+100 * maxDelta_bg_threshold +"% of BG "+convert_bg(bg, profile)+": SMB disabled; ";
enableSMB = false;
}
@@ -889,8 +894,10 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
var carbsReq = (bgUndershoot - zeroTempEffect) / csf - COBforCarbsReq;
zeroTempEffect = round(zeroTempEffect);
carbsReq = round(carbsReq);
- console.error("naive_eventualBG:",naive_eventualBG,"bgUndershoot:",bgUndershoot,"zeroTempDuration:",zeroTempDuration,"zeroTempEffect:",zeroTempEffect,"carbsReq:",carbsReq);
- if ( carbsReq >= profile.carbsReqThreshold && minutesAboveThreshold <= 45 ) {
+ console.error("naive_eventualBG: " + convert_bg(naive_eventualBG,profile) + ", bgUndershoot: " + convert_bg(bgUndershoot,profile) + ", zeroTempDuration: " + zeroTempDuration + ", zeroTempEffect: " + zeroTempEffect +", carbsReq: " + carbsReq);
+ if ( meal_data.reason == "Could not parse clock data" ) {
+ console.error("carbsReq unknown: Could not parse clock data");
+ } else if ( carbsReq >= profile.carbsReqThreshold && minutesAboveThreshold <= 45 ) {
rT.carbsReq = carbsReq;
rT.reason += carbsReq + " add'l carbs req w/in " + minutesAboveThreshold + "m; ";
}
@@ -903,7 +910,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
rT.reason += " and minDelta " + convert_bg(minDelta, profile) + " > " + "expectedDelta " + convert_bg(expectedDelta, profile) + "; ";
// predictive low glucose suspend mode: BG is / is projected to be < threshold
} else if ( bg < threshold || minGuardBG < threshold ) {
- rT.reason += "minGuardBG " + convert_bg(minGuardBG, profile) + "<" + convert_bg(threshold, profile);
+ rT.reason += "minGuardBG: " + convert_bg(minGuardBG, profile) + "<" + convert_bg(threshold, profile);
bgUndershoot = target_bg - minGuardBG;
var worstCaseInsulinReq = bgUndershoot / sens;
var durationReq = round(60*worstCaseInsulinReq / profile.current_basal);
@@ -1131,7 +1138,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
var nextBolusMins = round(SMBInterval-lastBolusAge,0);
var nextBolusSeconds = round((SMBInterval - lastBolusAge) * 60, 0) % 60;
//console.error(naive_eventualBG, insulinReq, worstCaseInsulinReq, durationReq);
- console.error("naive_eventualBG",naive_eventualBG+",",durationReq+"m "+smbLowTempReq+"U/h temp needed; last bolus",lastBolusAge+"m ago; maxBolus: "+maxBolus);
+ console.error("naive_eventualBG " + convert_bg(naive_eventualBG,profile) +", " + durationReq + "m " + smbLowTempReq + "U/h temp needed; last bolus " + lastBolusAge + "m ago; maxBolus: "+maxBolus);
if (lastBolusAge > SMBInterval) {
if (microBolus > 0) {
diff --git a/lib/iob/calculate.js b/lib/iob/calculate.js
index ba808692f..904e953f4 100644
--- a/lib/iob/calculate.js
+++ b/lib/iob/calculate.js
@@ -1,3 +1,5 @@
+'use strict';
+
function iobCalc(treatment, time, curve, dia, peak, profile) {
// iobCalc returns two variables:
// activityContrib = units of treatment.insulin used in previous minute
diff --git a/lib/iob/history.js b/lib/iob/history.js
index 860b5f83e..5c7ffe67a 100644
--- a/lib/iob/history.js
+++ b/lib/iob/history.js
@@ -1,3 +1,4 @@
+'use strict';
var tz = require('moment-timezone');
var basalprofile = require('../profile/basal.js');
@@ -326,6 +327,11 @@ function calcTempTreatments (inputs, zeroTempDuration) {
var temp = {};
temp.rate = current.rate;
temp.duration = current.duration;
+ // Loop reports the amount of insulin actually delivered while the temp basal was running
+ // use that to calculate the effective temp basal rate
+ if (typeof current.amount !== 'undefined') {
+ temp.rate = current.amount / current.duration * 60;
+ }
temp.timestamp = current.timestamp;
temp.started_at = new Date(tz(temp.timestamp));
temp.date = temp.started_at.getTime();
@@ -509,6 +515,7 @@ function calcTempTreatments (inputs, zeroTempDuration) {
var currentItem = splitHistory[i];
if (currentItem.duration > 0) {
+ var target_bg;
var currentRate = profile_data.current_basal;
if (!_.isEmpty(profile_data.basalprofile)) {
diff --git a/lib/iob/index.js b/lib/iob/index.js
index 678bbca04..fd64e3473 100644
--- a/lib/iob/index.js
+++ b/lib/iob/index.js
@@ -1,3 +1,4 @@
+'use strict';
var tz = require('moment-timezone');
var find_insulin = require('./history');
@@ -65,7 +66,7 @@ function generate (inputs, currentIOBOnly, treatments) {
iStop=4*60;
}
for (var i=0; i 150) {
- console.error("Error: carbRatio of " + carbRatio + " out of bounds.");
+ console_error(final_result, "Error: carbRatio of " + carbRatio + " out of bounds.");
return;
}
break;
@@ -26,7 +29,7 @@ function carbRatioLookup (inputs, profile) {
}
return carbRatio.ratio;
} else {
- console.error("Error: Unsupported carb_ratio units " + carbratio_data.units);
+ console_error(final_result, "Error: Unsupported carb_ratio units " + carbratio_data.units);
return;
}
//return carbRatio.ratio;
diff --git a/lib/profile/index.js b/lib/profile/index.js
index 00ba074ca..8ede7f428 100644
--- a/lib/profile/index.js
+++ b/lib/profile/index.js
@@ -1,3 +1,4 @@
+'use strict';
var basal = require('./basal');
var targets = require('./targets');
@@ -5,6 +6,10 @@ var isf = require('./isf');
var carb_ratios = require('./carbs');
var _ = require('lodash');
+var shared_node_utils = require('../../bin/oref0-shared-node-utils');
+var console_error = shared_node_utils.console_error;
+var console_log = shared_node_utils.console_log;
+
function defaults ( ) {
return /* profile */ {
max_iob: 0 // if max_iob is not provided, will default to zero
@@ -18,16 +23,12 @@ function defaults ( ) {
, low_temptarget_lowers_sensitivity: false // lower sensitivity for temptargets <= 99.
, sensitivity_raises_target: true // raise BG target when autosens detects sensitivity
, resistance_lowers_target: false // lower BG target when autosens detects resistance
- , adv_target_adjustments: false // lower target automatically when BG and eventualBG are high
, exercise_mode: false // when true, > 100 mg/dL high temp target adjusts sensitivityRatio for exercise_mode. This majorly changes the behavior of high temp targets from before. synonmym for high_temptarget_raises_sensitivity
, half_basal_exercise_target: 160 // when temptarget is 160 mg/dL *and* exercise_mode=true, run 50% basal at this level (120 = 75%; 140 = 60%)
// create maxCOB and default it to 120 because that's the most a typical body can absorb over 4 hours.
// (If someone enters more carbs or stacks more; OpenAPS will just truncate dosing based on 120.
// Essentially, this just limits AMA/SMB as a safety cap against excessive COB entry)
, maxCOB: 120
- , wide_bg_target_range: false // by default use only the low end of the pump's BG target range as OpenAPS target
- // by default the higher end of the target range is used only for avoiding bolus wizard overcorrections
- // use wide_bg_target_range: true to force neutral temps over a wider range of eventualBGs
, skip_neutral_temps: false // if true, don't set neutral temps
, unsuspend_if_no_temp: false // if true, pump will un-suspend after a zero temp finishes
, bolussnooze_dia_divisor: 2 // bolus snooze decays after 1/2 of DIA
@@ -49,12 +50,15 @@ function defaults ( ) {
// if the CGM sensor reads falsely high and doesn't come down as actual BG does
, enableSMB_always: false // always enable supermicrobolus (unless disabled by high temptarget)
, enableSMB_after_carbs: false // enable supermicrobolus for 6h after carbs, even with 0 COB
+ , enableSMB_high_bg: false // enable SMBs when a high BG is detected, based on the high BG target (adjusted or profile)
+ , enableSMB_high_bg_target: 110 // set the value enableSMB_high_bg will compare against to enable SMB. If BG > than this value, SMBs should enable.
// *** WARNING *** DO NOT USE enableSMB_always or enableSMB_after_carbs with Libre or similar.
, allowSMB_with_high_temptarget: false // allow supermicrobolus (if otherwise enabled) even with high temp targets
, maxSMBBasalMinutes: 30 // maximum minutes of basal that can be delivered as a single SMB with uncovered COB
, maxUAMSMBBasalMinutes: 30 // maximum minutes of basal that can be delivered as a single SMB when IOB exceeds COB
, SMBInterval: 3 // minimum interval between SMBs, in minutes.
, bolus_increment: 0.1 // minimum bolus that can be delivered as an SMB
+ , maxDelta_bg_threshold: 0.2 // maximum change in bg to use SMB, above that will disable SMB
, curve: "rapid-acting" // change this to "ultra-rapid" for Fiasp, or "bilinear" for old curve
, useCustomPeakTime: false // allows changing insulinPeakTime
, insulinPeakTime: 75 // number of minutes after a bolus activity peaks. defaults to 55m for Fiasp if useCustomPeakTime: false
@@ -68,10 +72,13 @@ function defaults ( ) {
// TODO: make maxRaw a preference here usable by oref0-raw in myopenaps-cgm-loop
//, maxRaw: 200 // highest raw/noisy CGM value considered safe to use for looping
, calc_glucose_noise: false
- };
+ , target_bg: false // set to an integer value in mg/dL to override pump min_bg
+ , edison_battery_shutdown_voltage: 3050
+ , pi_battery_shutdown_percent: 2
+ };
}
-function displayedDefaults () {
+function displayedDefaults (final_result) {
var allDefaults = defaults();
var profile = { };
@@ -81,9 +88,7 @@ function displayedDefaults () {
profile.autosens_max = allDefaults.autosens_max;
profile.autosens_min = allDefaults.autosens_min;
profile.rewind_resets_autosens = allDefaults.rewind_resets_autosens;
- profile.adv_target_adjustments = allDefaults.adv_target_adjustments;
profile.exercise_mode = allDefaults.exercise_mode;
- profile.wide_bg_target_range = allDefaults.wide_bg_target_range;
profile.sensitivity_raises_target = allDefaults.sensitivity_raises_target;
profile.unsuspend_if_no_temp = allDefaults.unsuspend_if_no_temp;
profile.enableSMB_with_COB = allDefaults.enableSMB_with_COB;
@@ -91,12 +96,14 @@ function displayedDefaults () {
profile.enableUAM = allDefaults.enableUAM;
profile.curve = allDefaults.curve;
profile.offline_hotspot = allDefaults.offline_hotspot;
+ profile.edison_battery_shutdown_voltage = allDefaults.edison_battery_shutdown_voltage;
+ profile.pi_battery_shutdown_percent = allDefaults.pi_battery_shutdown_percent;
- console.error(profile);
+ console_error(final_result, profile);
return profile
}
-function generate (inputs, opts) {
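+// final_result collects stdout, stderr, and the return value for the caller in the shared node process.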
+function generate (final_result, inputs, opts) {
var profile = opts && opts.type ? opts : defaults( );
// check if inputs has overrides for any of the default prefs
@@ -111,8 +118,8 @@ function generate (inputs, opts) {
if (inputs.settings.insulin_action_curve > 1) {
profile.dia = pumpsettings_data.insulin_action_curve;
} else {
- console.error('DIA of', profile.dia, 'is not supported');
- return -1;
+ console_error(final_result, 'DIA of', profile.dia, 'is not supported');
+ return -1;
}
if (inputs.model) {
@@ -130,19 +137,19 @@ function generate (inputs, opts) {
profile.max_daily_basal = basal.maxDailyBasal(inputs);
profile.max_basal = basal.maxBasalLookup(inputs);
if (profile.current_basal === 0) {
- console.error("current_basal of",profile.current_basal,"is not supported");
- return -1;
+ console_error(final_result, "current_basal of",profile.current_basal,"is not supported");
+ return -1;
}
if (profile.max_daily_basal === 0) {
- console.error("max_daily_basal of",profile.max_daily_basal,"is not supported");
- return -1;
+ console_error(final_result, "max_daily_basal of",profile.max_daily_basal,"is not supported");
+ return -1;
}
if (profile.max_basal < 0.1) {
- console.error("max_basal of",profile.max_basal,"is not supported");
- return -1;
+ console_error(final_result, "max_basal of",profile.max_basal,"is not supported");
+ return -1;
}
- var range = targets.bgTargetsLookup(inputs, profile);
+ var range = targets.bgTargetsLookup(final_result, inputs, profile);
profile.out_units = inputs.targets.user_preferred_units;
profile.min_bg = Math.round(range.min_bg);
profile.max_bg = Math.round(range.max_bg);
@@ -158,17 +165,18 @@ function generate (inputs, opts) {
delete profile.bg_targets.raw;
profile.temptargetSet = range.temptargetSet;
- profile.sens = isf.isfLookup(inputs.isf);
+ var lastResult = null;
+ [profile.sens, lastResult] = isf.isfLookup(inputs.isf, undefined, lastResult);
profile.isfProfile = inputs.isf;
if (profile.sens < 5) {
- console.error("ISF of",profile.sens,"is not supported");
- return -1;
+ console_error(final_result, "ISF of",profile.sens,"is not supported");
+ return -1;
}
if (typeof(inputs.carbratio) !== "undefined") {
- profile.carb_ratio = carb_ratios.carbRatioLookup(inputs, profile);
+ profile.carb_ratio = carb_ratios.carbRatioLookup(final_result, inputs, profile);
profile.carb_ratios = inputs.carbratio;
} else {
- console.error("Profile wasn't given carb ratio data, cannot calculate carb_ratio");
+ console_error(final_result, "Profile wasn't given carb ratio data, cannot calculate carb_ratio");
}
return profile;
}
diff --git a/lib/profile/isf.js b/lib/profile/isf.js
index ca9bcea05..27cdca6e7 100644
--- a/lib/profile/isf.js
+++ b/lib/profile/isf.js
@@ -1,9 +1,8 @@
+'use strict';
var _ = require('lodash');
-var lastResult = null;
-
-function isfLookup(isf_data, timestamp) {
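+// Returns [sensitivity, lastResult]; callers pass lastResult back in on the next
+// call so repeated lookups within the same schedule window can return early.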
+function isfLookup(isf_data, timestamp, lastResult) {
var nowDate = timestamp;
@@ -14,7 +13,7 @@ function isfLookup(isf_data, timestamp) {
var nowMinutes = nowDate.getHours() * 60 + nowDate.getMinutes();
if (lastResult && nowMinutes >= lastResult.offset && nowMinutes < lastResult.endOffset) {
- return lastResult.sensitivity;
+ return [lastResult.sensitivity, lastResult];
}
isf_data = _.sortBy(isf_data.sensitivities, function(o) { return o.offset; });
@@ -22,7 +21,7 @@ function isfLookup(isf_data, timestamp) {
var isfSchedule = isf_data[isf_data.length - 1];
if (isf_data[0].offset !== 0) {
- return -1;
+ return [-1, lastResult];
}
var endMinutes = 1440;
@@ -40,7 +39,7 @@ function isfLookup(isf_data, timestamp) {
lastResult = isfSchedule;
lastResult.endOffset = endMinutes;
- return isfSchedule.sensitivity;
+ return [isfSchedule.sensitivity, lastResult];
}
isfLookup.isfLookup = isfLookup;
diff --git a/lib/profile/targets.js b/lib/profile/targets.js
index fc91660ed..31a140a91 100644
--- a/lib/profile/targets.js
+++ b/lib/profile/targets.js
@@ -1,11 +1,14 @@
+'use strict';
var getTime = require('../medtronic-clock');
+var shared_node_utils = require('../../bin/oref0-shared-node-utils');
+var console_error = shared_node_utils.console_error;
-function bgTargetsLookup (inputs, profile) {
- return bound_target_range(lookup(inputs, profile));
+function bgTargetsLookup (final_result, inputs, profile) {
+ return bound_target_range(lookup(final_result, inputs, profile));
}
-function lookup (inputs, profile) {
+function lookup (final_result, inputs, profile) {
var bgtargets_data = inputs.targets;
var temptargets_data = inputs.temptargets;
var now = new Date();
@@ -21,19 +24,19 @@ function lookup (inputs, profile) {
}
}
- if (profile.wide_bg_target_range === true) {
- console.error('Allowing wide eventualBG target range: ' + bgTargets.low + ' - ' + bgTargets.high );
- } else {
- bgTargets.high = bgTargets.low;
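+ // a fixed target_bg preference (mg/dL) overrides the pump's scheduled low target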
+ if (profile.target_bg) {
+ bgTargets.low = profile.target_bg;
}
+ bgTargets.high = bgTargets.low;
+
var tempTargets = bgTargets;
// sort tempTargets by date so we can process most recent first
try {
temptargets_data.sort(function (a, b) { return new Date(b.created_at) - new Date(a.created_at) });
} catch (e) {
- console.error("No temptargets found.");
+ console_error(final_result, "No temptargets found.");
}
//console.error(temptargets_data);
//console.error(now);
@@ -48,7 +51,7 @@ function lookup (inputs, profile) {
tempTargets = bgTargets;
break;
} else if (! temptargets_data[i].targetBottom || ! temptargets_data[i].targetTop) {
- console.error("eventualBG target range invalid: " + temptargets_data[i].targetBottom + "-" + temptargets_data[i].targetTop);
+ console_error(final_result, "eventualBG target range invalid: " + temptargets_data[i].targetBottom + "-" + temptargets_data[i].targetTop);
break;
} else if (now >= start && now < expires ) {
//console.error(temptargets_data[i]);
@@ -77,8 +80,8 @@ function bound_target_range (target) {
return target
}
-bgTargetsLookup.bgTargetsLookup = bgTargetsLookup;
-bgTargetsLookup.lookup = lookup;
-bgTargetsLookup.bound_target_range = bound_target_range;
+bgTargetsLookup.bgTargetsLookup = bgTargetsLookup; // does log, so needs final_result
+bgTargetsLookup.lookup = lookup; // not used outside this module
+bgTargetsLookup.bound_target_range = bound_target_range; // does not log
exports = module.exports = bgTargetsLookup;
diff --git a/lib/pump.js b/lib/pump.js
index b2f892a68..838fea172 100644
--- a/lib/pump.js
+++ b/lib/pump.js
@@ -1,3 +1,4 @@
+'use strict';
function translate (treatments) {
diff --git a/lib/require-utils.js b/lib/require-utils.js
index 6081e7e28..c17f3e82b 100644
--- a/lib/require-utils.js
+++ b/lib/require-utils.js
@@ -14,18 +14,68 @@ function safeRequire (path) {
return resolved;
}
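+// Like safeRequire, but reads and parses the JSON file directly, so repeated reads
+// are not served from Node's require cache.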
+function safeLoadFile(path) {
+
+ var resolved;
+
+ try {
+ resolved = JSON.parse(fs.readFileSync(path, 'utf8'));
+ //console.log('content = ' , resolved);
+ } catch (e) {
+ console.error("Could not require: " + path, e);
+ }
+ return resolved;
+}
+
function requireWithTimestamp (path) {
- var resolved = safeRequire(path);
+ var resolved = safeLoadFile(path);
if (resolved) {
resolved.timestamp = fs.statSync(path).mtime;
}
-
return resolved;
}
+// Functions that are only needed to test this module; they can be removed in the future.
+
+function compareMethods(path) {
+ var new_data = safeLoadFile(path);
+ var old_data = safeRequire(path);
+ if (JSON.stringify(new_data) === JSON.stringify(old_data) ) {
+ console.log("test passed", new_data, old_data);
+ } else {
+ console.log("test failed");
+ }
+}
+
+// Module tests.
+if (!module.parent) {
+ // Write the first file: and test it.
+ var obj = {x: "x", y: 1}
+ fs.writeFileSync('/tmp/file1.json', JSON.stringify(obj));
+ compareMethods('/tmp/file1.json');
+
+ // Check a non-existent file.
+ compareMethods('/tmp/not_exist.json');
+
+ // Check a file that is not formatted well.
+ fs.writeFileSync('/tmp/bad.json', '{"x":"x","y":1');
+ compareMethods('/tmp/bad.json');
+
+ // Rewrite the file and reread it.
+ var new_obj = {x: "x", y: 2}
+ fs.writeFileSync('/tmp/file1.json', JSON.stringify(new_obj));
+ var obj_read = safeLoadFile('/tmp/file1.json');
+ if (JSON.stringify(new_obj) === JSON.stringify(obj_read) ) {
+ console.log("test passed");
+ } else {
+ console.log("test failed");
+ }
+
+}
module.exports = {
safeRequire: safeRequire
, requireWithTimestamp: requireWithTimestamp
-};
\ No newline at end of file
+ , safeLoadFile: safeLoadFile
+};
diff --git a/lib/temps.js b/lib/temps.js
index 90abd1bf0..9ac6918cc 100644
--- a/lib/temps.js
+++ b/lib/temps.js
@@ -1,3 +1,4 @@
+'use strict';
function filter (treatments) {
diff --git a/package.json b/package.json
index 3db784b27..bffeac98a 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "oref0",
- "version": "0.7.0",
+ "version": "0.7.1",
"description": "openaps oref0 reference implementation of the reference design",
"scripts": {
"test": "make test",
@@ -93,6 +93,7 @@
"oref0-upload-profile": "./bin/oref0-upload-profile.js",
"oref0-version": "./bin/oref0-version.sh",
"oref0-get-ns-entries": "./bin/oref0-get-ns-entries.js",
+ "oref0-shared-node-loop": "./bin/oref0-shared-node-loop.sh",
"peb-urchin-status": "./bin/peb-urchin-status.sh",
"wifi": "./bin/oref0-tail-wifi.sh"
},
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 000000000..b51dedf39
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+requests==2.25.1
+six==1.15.0
+pytz==2021.1
diff --git a/tests/check-syntax.test.js b/tests/check-syntax.test.js
index 9f52e1240..ba0da571c 100644
--- a/tests/check-syntax.test.js
+++ b/tests/check-syntax.test.js
@@ -93,8 +93,9 @@ describe("Syntax checks", function() {
var type = getFileFormat(file);
if(type !== "unknown") {
it(file, function() {
+ this.timeout(4000);
checkFile(file, type);
});
}
});
-});
\ No newline at end of file
+});
diff --git a/tests/command-behavior.tests.sh b/tests/command-behavior.tests.sh
index 16c620e38..2741a64b0 100755
--- a/tests/command-behavior.tests.sh
+++ b/tests/command-behavior.tests.sh
@@ -474,7 +474,6 @@ EOT
cat >profile.json <pumpprofile.json < os.path.getmtime(myopenaps_dir + "monitor/glucose.json"):
+ if os.path.getmtime(myopenaps_dir + "xdrip/glucose.json") > os.path.getmtime(myopenaps_dir + "monitor/glucose.json") and os.path.getsize(myopenaps_dir + "xdrip/glucose.json") > 0:
json_url = os.path.join(myopenaps_dir + "xdrip/glucose.json")
else:
json_url = os.path.join(myopenaps_dir + "monitor/glucose.json")