Categorize improvements for libre 1 minute data (openaps#1238)
* Work in progress on categorize

  * Determine date & dateString properties in prepGlucose map function
  * Filter invalid records in filterRecords filter function
  * Clean up for loop to remove previously performed checks
  * Average any values that fall within the 2-minute deadband

* Increase limit to allow for 1-minute data

  * Ensure 1440 (60*24) records can be downloaded
rob-andy authored and scottleibrand committed Sep 16, 2019
1 parent d36b1df commit e53241b
Showing 3 changed files with 41 additions and 43 deletions.
bin/oref0-autotune.py: 2 changes (1 addition, 1 deletion)

@@ -156,7 +156,7 @@ def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory):
     date_list = [start_date + datetime.timedelta(days=x) for x in range(0, (end_date - start_date).days)]
 
     for date in date_list:
-        url="{0}/api/v1/entries/sgv.json?find\[date\]\[\$gte\]={1}&find\[date\]\[\$lte\]={1}`&count=1000"
+        url="{0}/api/v1/entries/sgv.json?find\[date\]\[\$gte\]={1}&find\[date\]\[\$lte\]={1}`&count=1500"
         url = url.format(nightscout_host, date)
         #TODO: Add ability to use API secret for Nightscout.
         res = requests.get(url)
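A note on the new limit: at one reading per minute, a Libre sensor produces up to 60*24 = 1440 entries per day, so the old count=1000 cap could silently truncate a day of 1-minute data, while 1500 leaves headroom. A throwaway sketch of the arithmetic (illustrative only, not oref0 code):

    // Why count=1500: a full day of one-minute readings overflows the old cap.
    var readingsPerDay = 60 * 24;          // 1440 readings at one per minute
    console.log(readingsPerDay > 1000);    // true: count=1000 would drop readings
    console.log(readingsPerDay <= 1500);   // true: count=1500 fits a full day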
bin/oref0-autotune.sh: 2 changes (1 addition, 1 deletion)

@@ -203,7 +203,7 @@ echo "Grabbing NIGHTSCOUT treatments.json and entries/sgv.json for date range...
 for i in "${date_list[@]}"
 do
     # pull CGM data from 4am-4am
-    query="find%5Bdate%5D%5B%24gte%5D=$(to_epochtime "$i +4 hours" |nonl; echo 000)&find%5Bdate%5D%5B%24lte%5D=$(to_epochtime "$i +28 hours" |nonl; echo 000)&count=1000"
+    query="find%5Bdate%5D%5B%24gte%5D=$(to_epochtime "$i +4 hours" |nonl; echo 000)&find%5Bdate%5D%5B%24lte%5D=$(to_epochtime "$i +28 hours" |nonl; echo 000)&count=1500"
     echo Query: $NIGHTSCOUT_HOST entries/sgv.json $query
     ns-get host $NIGHTSCOUT_HOST entries/sgv.json $query > ns-entries.$i.json || die "Couldn't download ns-entries.$i.json"
     ls -la ns-entries.$i.json || die "No ns-entries.$i.json downloaded"
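Decoded, the percent-encoded query reads find[date][$gte]=<start-ms>&find[date][$lte]=<end-ms>&count=1500; the trailing "echo 000" converts to_epochtime's whole seconds into the epoch-millisecond timestamps Nightscout entries use. A hedged sketch of the same construction (buildEntriesQuery is a hypothetical helper for illustration, not part of oref0):

    // Hypothetical re-creation of the query string the shell script builds.
    // Nightscout stores entry dates as Unix epoch milliseconds, hence the
    // appended "000" after to_epochtime's whole-second output.
    function buildEntriesQuery(startMs, endMs, count) {
        var query = "find[date][$gte]=" + startMs +
                    "&find[date][$lte]=" + endMs +
                    "&count=" + count;
        // Percent-encode brackets and "$" the way the script hard-codes them.
        return query.replace(/\[/g, "%5B").replace(/\]/g, "%5D").replace(/\$/g, "%24");
    }
    console.log(buildEntriesQuery(1568608200000, 1568694600000, 1500));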
lib/autotune-prep/categorize.js: 80 changes (39 additions, 41 deletions)

@@ -17,21 +17,38 @@ function categorizeBGDatums(opts) {
         return bDate.getTime() - aDate.getTime();
     });
     var profileData = opts.profile;
+
+    var glucoseData = [ ];
     if (typeof(opts.glucose) !== 'undefined') {
         //var glucoseData = opts.glucose;
-        var glucoseData = opts.glucose.map(function prepGlucose (obj) {
+        glucoseData = opts.glucose.map(function prepGlucose (obj) {
             //Support the NS sgv field to avoid having to convert in a custom way
             obj.glucose = obj.glucose || obj.sgv;
+
+            if (obj.date) {
+                //obj.BGTime = new Date(obj.date);
+            } else if (obj.displayTime) {
+                // Attempt to get date from displayTime
+                obj.date = new Date(obj.displayTime.replace('T', ' ')).getTime();
+            } else if (obj.dateString) {
+                // Attempt to get date from dateString
+                obj.date = new Date(obj.dateString).getTime();
+            }// else { console.error("Could not determine BG time"); }
+
+            if (!obj.dateString)
+            {
+                obj.dateString = new Date(tz(obj.date)).toISOString();
+            }
             return obj;
+        }).filter(function filterRecords(obj) {
+            // Only take records with a valid date record
+            // and a glucose value, which is also above 39
+            return (obj.date && obj.glucose && obj.glucose >=39);
+        }).sort(function (a, b) {
+            // sort the collection in order
+            return b.date - a.date;
         });
     }
-    // this sorts the glucose collection in order.
-    glucoseData.sort(function (a, b) {
-        var aDate = new Date(tz(a.date));
-        var bDate = new Date(tz(b.date));
-        //console.error(aDate);
-        return bDate.getTime() - aDate.getTime();
-    });
     // if (typeof(opts.preppedGlucose) !== 'undefined') {
     //     var preppedGlucoseData = opts.preppedGlucose;
     // }
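The net effect of the new map/filter/sort chain: every record ends up with an epoch-millisecond date (derived from displayTime or dateString when absent) and an ISO dateString, records lacking a usable date or carrying an error-code glucose value (below 39 mg/dL) are dropped, and the survivors are ordered newest first. A condensed sketch of the chain on invented sample records (the dateString backfill, which needs the tz helper, is omitted):

    // Invented input: an NS-style record, a Libre-style record with only a
    // displayTime, and an error-code record that filterRecords will drop.
    var glucose = [
        { sgv: 120, date: 1568600060000 },
        { glucose: 118, displayTime: "2019-09-16T02:15:00" },
        { sgv: 5, date: 1568600180000 }
    ];
    var prepped = glucose.map(function prepGlucose(obj) {
        obj.glucose = obj.glucose || obj.sgv;      // accept the NS sgv field
        if (!obj.date && obj.displayTime) {        // derive epoch ms when missing
            obj.date = new Date(obj.displayTime.replace('T', ' ')).getTime();
        }
        return obj;
    }).filter(function filterRecords(obj) {
        return obj.date && obj.glucose && obj.glucose >= 39;
    }).sort(function (a, b) {
        return b.date - a.date;                    // newest first
    });
    console.log(prepped.length);                   // 2: the sub-39 record is gone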
@@ -53,44 +70,25 @@
     var CRData = [];
 
     var bucketedData = [];
-    bucketedData[0] = glucoseData[0];
+    bucketedData[0] = JSON.parse(JSON.stringify(glucoseData[0]));
     var j=0;
+    var k=0; // index of first value used by bucket
     //for loop to validate and bucket the data
     for (var i=1; i < glucoseData.length; ++i) {
-        var BGTime;
-        var lastBGTime;
-        if (glucoseData[i].date) {
-            BGTime = new Date(glucoseData[i].date);
-        } else if (glucoseData[i].displayTime) {
-            BGTime = new Date(glucoseData[i].displayTime.replace('T', ' '));
-        } else if (glucoseData[i].dateString) {
-            BGTime = new Date(glucoseData[i].dateString);
-        } else { console.error("Could not determine BG time"); }
-        if (glucoseData[i-1].date) {
-            lastBGTime = new Date(glucoseData[i-1].date);
-        } else if (glucoseData[i-1].displayTime) {
-            lastBGTime = new Date(glucoseData[i-1].displayTime.replace('T', ' '));
-        } else if (glucoseData[i-1].dateString) {
-            lastBGTime = new Date(glucoseData[i-1].dateString);
-        } else { console.error("Could not determine last BG time"); }
-        if (glucoseData[i].glucose < 39 || glucoseData[i-1].glucose < 39) {
-            continue;
-        }
+        var BGTime = glucoseData[i].date;
+        var lastBGTime = glucoseData[k].date;
         var elapsedMinutes = (BGTime - lastBGTime)/(60*1000);
-        if ( glucoseData[i].glucose) {
-            if(Math.abs(elapsedMinutes) > 2) {
-                j++;
-                bucketedData[j]=glucoseData[i];
-                bucketedData[j].date = BGTime.getTime();
-                if (! bucketedData[j].dateString) {
-                    bucketedData[j].dateString = BGTime.toISOString();
-                }
-            } else {
-                // if duplicate, average the two
-                bucketedData[j].glucose = (bucketedData[j].glucose + glucoseData[i].glucose)/2;
-            }
+
+        if(Math.abs(elapsedMinutes) >= 2) {
+            j++; // move to next bucket
+            k=i; // store index of first value used by bucket
+            bucketedData[j]=JSON.parse(JSON.stringify(glucoseData[i]));
         } else {
-            console.error("Skipping record w/o glucose:",JSON.stringify(glucoseData[i]));
+            // average all readings within time deadband
+            var glucoseTotal = glucoseData.slice(k, i+1).reduce(function(total, entry) {
+                return total + entry.glucose;
+            }, 0);
+            bucketedData[j].glucose = glucoseTotal / (i-k+1);
         }
     }
     //console.error(bucketedData);
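How the rewritten loop buckets 1-minute data: k remembers the first reading of the current bucket, any reading less than 2 minutes from that anchor is averaged into the bucket (over the whole k..i slice, not pairwise as before), and a reading 2 or more minutes away starts a new bucket. The deep copy via JSON.parse(JSON.stringify(...)) keeps averaged buckets from mutating the original glucoseData entries. A worked sketch with invented 1-minute readings, newest first as the sort above produces:

    // Invented readings one minute apart; dates are epoch milliseconds.
    var glucoseData = [
        { glucose: 110, date: 300000 },   // t = 5 min: starts bucket 0
        { glucose: 112, date: 240000 },   // t = 4 min: 1 min away, averaged in
        { glucose: 118, date: 180000 },   // t = 3 min: 2 min away, starts bucket 1
        { glucose: 120, date: 120000 }    // t = 2 min: averaged into bucket 1
    ];
    var bucketedData = [JSON.parse(JSON.stringify(glucoseData[0]))];
    var j = 0, k = 0;                     // j: current bucket, k: its first reading
    for (var i = 1; i < glucoseData.length; ++i) {
        var elapsedMinutes = (glucoseData[i].date - glucoseData[k].date) / (60 * 1000);
        if (Math.abs(elapsedMinutes) >= 2) {
            j++; k = i;                   // start a new bucket at this reading
            bucketedData[j] = JSON.parse(JSON.stringify(glucoseData[i]));
        } else {
            // average every reading the current bucket has absorbed so far
            var glucoseTotal = glucoseData.slice(k, i + 1).reduce(function (total, entry) {
                return total + entry.glucose;
            }, 0);
            bucketedData[j].glucose = glucoseTotal / (i - k + 1);
        }
    }
    console.log(bucketedData.map(function (b) { return b.glucose; })); // [ 111, 119 ]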
