Thanks to the ideas of
@codi640
I came up with the following solution (without the percentile line, for testing).
Please take a look.
Is this solution OK, especially when applied to a long range such as 1 month or 1 year?
// Base stream: system CPU usage for the aggregate "cpu-total" series
// over the dashboard time range.
// Fixes: replaced typographic quotes (“ ”) with straight quotes — Flux
// only accepts ASCII double quotes — and removed the stray unbalanced
// ")" that followed the last filter.
data = from(bucket: v.bucket)
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_measurement"] == "cpu")
    |> filter(fn: (r) => r["_field"] == "usage_system")
    |> filter(fn: (r) => r["cpu"] == "cpu-total")
// Quantile level taken from the dashboard variable, e.g. "0.95" -> 0.95.
q = float(v: v.percentiles)

// Returns the q-th quantile of _value for one host as a single record
// (not a table stream), so its _value can be compared in a row filter.
// Fixes: replaced typographic quotes (“ ”) with straight ASCII quotes,
// which are the only quotes Flux accepts in string literals.
quantileValue_by_host = (tables=<-, host) => {
    result = tables
        |> group(columns: ["host"])
        |> quantile(column: "_value", q: q, method: "estimate_tdigest", compression: 1000.0)
        |> filter(fn: (r) => r.host == host)
        // Drop all remaining group-key columns, then extract the single
        // row: this turns the one-row table into a scalar record.
        |> drop(columns: ["_start", "_stop", "_field", "cpu", "_measurement", "host"])
        |> findRecord(fn: (key) => true, idx: 0)
    return result
}
// Keep only the points that fall below the per-host quantile threshold.
// Fixes: replaced typographic quotes (“ ”) with straight ASCII quotes.
// NOTE(review): quantileValue_by_host() is called inside the row
// predicate, so the quantile may be recomputed for every row — over a
// long range (1 month / 1 year) this is expensive. Consider computing
// the threshold once per host and combining with join() instead.
data_in_quantile = data
    |> filter(fn: (r) => r._value < quantileValue_by_host(tables: data, host: r["host"])._value)
    |> yield(name: "p" + v.percentiles + " data")