// Parameters
var period = 10s
var every = 10s
var tolerance = 10d

// Dataframe
// check - http://192.168.0.126:9092/kapacitor/v1/tasks/voc_stats/step1
// check - http://192.168.0.5:9092/kapacitor/v1/tasks/voc_stats/step1
var hourlyAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(1h)
        .every(every)
    |httpOut('step1')

var weeklyAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(7d)
        .every(every)
    |httpOut('step2')

var weeklyMinimum = batch
    |query('''SELECT min(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(7d)
        .every(every)
    |httpOut('step3')

var data = hourlyAverage
    |join(weeklyAverage, weeklyMinimum)
        .as('hourlyAverage', 'weeklyAverage', 'weeklyMinimum')
        .tolerance(8d)
    |httpOut('step4')
I can observe the httpOut results from steps 1, 2 and 3, but step 4 always outputs a null series.
The httpOut responses look like this:
step1: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-01-29T21:10:26.908059043Z",0.784185539493217]]}]}
step2: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-01-22T22:10:46.908007082Z",3.631224120920862]]}]}
step3: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-01-28T00:21:15Z",0.1752190237792004]]}]}
step4: {"series":[{"name":"cpu"}]}
@rawkode Any possible answer for this, or do you need any extra information? I am running Docker from the InfluxDB sandbox; I tried both ./sandbox up and ./sandbox up -nightly.
I tried almost the same script today and it is not working either:
var every = 15s
var custom_period = 3h

// Dataframe
// check - http://192.168.0.123:9092/kapacitor/v1/tasks/Rule-1-cpu/step1
var hourlyAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(1h)
        .every(every)
        .fill('none')
        .align()
    |httpOut('step1')

// Hourly average from custom_period ago
var customAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(1h)
        .every(every)
        .fill('none')
        .align()
        // Move the query window back in time by custom_period
        .offset(custom_period)
    // Shift the timestamps forward again so they line up with the other queries
    |shift(custom_period)
    |httpOut('step2')

var weeklyAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(7d)
        .every(every)
        .fill('none')
        .align()
    |httpOut('step3')

var weeklyMinimum = batch
    |query('''SELECT min(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(7d)
        .every(every)
        .fill('none')
        .align()
    |httpOut('step4')

var data = hourlyAverage
    |join(customAverage, weeklyAverage, weeklyMinimum)
        .as('hourlyAverage', 'customAverage', 'weeklyAverage', 'weeklyMinimum')
        .tolerance(2w)
    |httpOut('step5')
Here I have similar outputs:
step1: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-02-04T18:51:30Z",5.510724480730352]]}]}
step2: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-02-04T18:51:30Z",3.297008825595237]]}]}
step3: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-01-28T19:51:45Z",4.021326806086728]]}]}
step4: {"series":[{"name":"cpu","columns":["time","usage_user"],"values":[["2019-02-04T07:04:30Z",0.17513134851138412]]}]}
step5: {"series":[{"name":"cpu"}]}
I have tried plenty of permutations and combinations, but nothing gives me the joined result, even though the 2w tolerance above is wider than the spread of timestamps in steps 1 to 4. I would really appreciate it if you can suggest a workaround.
This appears to be down to the way join handles batched points, but it can be fixed by using last():
var period = 10s
var every = 10s
var tolerance = 10d

var hourlyAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(1h)
        .every(every)
    |last('usage_user')
        .as('usage_user')

var weeklyAverage = batch
    |query('''SELECT mean(usage_user) AS usage_user FROM "telegraf"."autogen"."cpu" WHERE cpu = 'cpu-total' ''')
        .period(7d)
        .every(every)
    |last('usage_user')
        .as('usage_user')

hourlyAverage
    |join(weeklyAverage)
        .as('hourlyAverage', 'weeklyAverage')
        .tolerance(1s)
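If I understand the node behaviour correctly, last() collapses each batch into a single point stamped with the batch time rather than the time of an individual point inside the window, so both queries, firing on the same every interval, emit points whose timestamps line up, and a 1s tolerance is enough for the join to pair them.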
Thanks @rawkode.
It's an amazing workaround. I have gone through many posts, but none of them explained this trick. I am also happy that it solved another issue of mine with timestamps. In the example below, although the join node succeeds, it has a problem with the timestamp.
// Parameters
var every = 10s

// Dataframe
// check - http://ip/kapacitor/v1/tasks/test/step1
// Past one week average
var weeklyAverage = batch
    |query('''SELECT mean(voc) AS voc_0015BC0036000018 FROM "sensordata"."autogen"."voc" WHERE eui = '0015BC0036000018' ''')
        .period(7d)
        .every(every)
        .groupBy('eui')
        .align()
    |httpOut('step1')

// Past one week minimum
var weeklyMinimum = batch
    |query('''SELECT mean(voc) AS voc_0015BC0036000018 FROM "sensordata"."autogen"."voc" WHERE eui = '0015BC0036000018' ''')
        .period(7d)
        .every(every)
        .groupBy('eui')
        .align()
    |httpOut('step2')

var data = weeklyAverage
    |join(weeklyMinimum)
        .as('weeklyAverage', 'weeklyMinimum')
        .tolerance(2w)
    |httpOut('step3')

data
    |influxDBOut()
        .database('telegraf')
        .retentionPolicy('autogen')
        .measurement('cpu_stats')
If you look at the http output:
step1: {"series":[{"name":"voc","tags":{"eui":"0015BC0036000018"},"columns":["time","voc_0015BC0036000018"],"values":[["2019-01-30T19:54:10Z",107.27860696517413]]}]}
step2: {"series":[{"name":"voc","tags":{"eui":"0015BC0036000018"},"columns":["time","voc_0015BC0036000018"],"values":[["2019-01-30T19:54:10Z",107.27860696517413]]}]}
step3: {"series":[{"name":"voc","tags":{"eui":"0015BC0036000018"},"columns":["time","weeklyAverage.voc_0015BC0036000018","weeklyMinimum.voc_0015BC0036000018"],"values":[["2019-01-28T00:00:00Z",107.27860696517413,107.27860696517413]]}]}
I observe that the timestamp in step 3 is always "2019-01-28T00:00:00Z" on every single execution; effectively, the influxDBOut node is overwriting the same data point each time the task runs.
But now, with the last() workaround, I think it will form a time series rather than overwriting the same data point again and again.
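For reference, this is roughly how the last() workaround looks applied to the voc task above. It is an untested sketch: I have also switched the minimum query to min(voc), since my original post used mean(voc) in both queries by mistake (which is why steps 1 and 2 returned identical values), and kept the original influxDBOut target unchanged.

var every = 10s

var weeklyAverage = batch
    |query('''SELECT mean(voc) AS voc_0015BC0036000018 FROM "sensordata"."autogen"."voc" WHERE eui = '0015BC0036000018' ''')
        .period(7d)
        .every(every)
        .groupBy('eui')
        .align()
    // Collapse each 7d batch to a single point stamped with the batch time
    |last('voc_0015BC0036000018')
        .as('voc_0015BC0036000018')

var weeklyMinimum = batch
    // Note: min(voc) here is my correction; the original post had mean(voc)
    |query('''SELECT min(voc) AS voc_0015BC0036000018 FROM "sensordata"."autogen"."voc" WHERE eui = '0015BC0036000018' ''')
        .period(7d)
        .every(every)
        .groupBy('eui')
        .align()
    |last('voc_0015BC0036000018')
        .as('voc_0015BC0036000018')

weeklyAverage
    |join(weeklyMinimum)
        .as('weeklyAverage', 'weeklyMinimum')
        .tolerance(1s)
    |influxDBOut()
        .database('telegraf')
        .retentionPolicy('autogen')
        .measurement('cpu_stats')

Since both batches now emit one point per every interval with matching timestamps, each execution should write a new point at the current batch time instead of rewriting the one at 2019-01-28T00:00:00Z.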