Range-related variables #814
@@ -162,7 +162,7 @@ jobs:
     steps:
       - checkout
       - run: sudo pip install codespell
-      - run: codespell -S './.git*,./src/img*' -L que
+      - run: codespell -S './.git*,./src/img*' -L que --ignore-words=./.codespell_ignore

 workflows:
   version: 2
.codespell_ignore (new file)
@@ -0,0 +1 @@
+hist
@@ -1,6 +1,23 @@
 Functions reference
 ===================

+## Functions Variables
+
+There are some built-in template variables available for use in functions:
+
+- `$__range_ms` - panel time range in ms
+- `$__range_s` - panel time range in seconds
+- `$__range` - panel time range, string representation (`30s`, `1m`, `1h`)
+- `$__range_series` - invoke function over all series values
+
+Examples:
+```
+groupBy($__range, avg)
+percentile($__range_series, 95) - 95th percentile over all values
+```
+
+---
+
 ## Transform

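To make these concrete: for a panel whose time range is exactly 6 hours, `$__range_ms` resolves to `21600000`, `$__range_s` to `21600`, and `$__range` to `6h`, while `$__range_series` resolves to the `range_series` sentinel introduced later in this commit. So, as an illustrative pair of expressions in the same style as the docs above:

```
groupBy($__range, avg) - behaves like groupBy(6h, avg) for this panel
percentile($__range_series, 95) - one 95th-percentile value computed over every point in the range
```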
@@ -10,7 +27,7 @@ Functions reference
 groupBy(interval, function)
 ```

-Takes each timeseries and consolidate its points falled in given _interval_ into one point using _function_, which can be one of: _avg_, _min_, _max_, _median_.
+Takes each timeseries and consolidates its points that fall in the given _interval_ into one point using _function_, which can be one of: _avg_, _min_, _max_, _median_.

 Examples:
 ```
@@ -124,7 +141,7 @@ Replaces `null` values with N
 aggregateBy(interval, function)
 ```

-Takes all timeseries and consolidate all its points falled in given _interval_ into one point using _function_, which can be one of: _avg_, _min_, _max_, _median_.
+Takes all timeseries and consolidates all their points that fall in the given _interval_ into one point using _function_, which can be one of: _avg_, _min_, _max_, _median_.

 Examples:
 ```
@@ -142,6 +159,20 @@ This will add metrics together and return the sum at each datapoint. This method

 ---

+### _percentile_
+```
+percentile(interval, N)
+```
+Takes all timeseries and consolidates all their points that fall in the given _interval_ into one point by the Nth percentile.
+
+Examples:
+```
+percentile(1h, 99)
+percentile($__range_series, 95) - 95th percentile over all values
+```
+
+---
+
 ### _average_
 ```
 average(interval)
@@ -37,3 +37,5 @@ export const TRIGGER_SEVERITY = [

 /** Minimum interval for SLA over time (1 hour) */
 export const MIN_SLA_INTERVAL = 3600;
+
+export const RANGE_VARIABLE_VALUE = 'range_series';
@@ -1,9 +1,8 @@
 import _ from 'lodash';
 import * as utils from './utils';
-import ts from './timeseries';
+import ts, { groupBy_perf as groupBy } from './timeseries';

 let downsampleSeries = ts.downsample;
-let groupBy = ts.groupBy_perf;
 let groupBy_exported = (interval, groupFunc, datapoints) => groupBy(datapoints, interval, groupFunc);
 let sumSeries = ts.sumSeries;
 let delta = ts.delta;
@@ -111,6 +111,9 @@ export class ZabbixDatasource {
     let timeFrom = Math.ceil(dateMath.parse(options.range.from) / 1000);
     let timeTo = Math.ceil(dateMath.parse(options.range.to) / 1000);

+    // Add range variables
+    options.scopedVars = Object.assign({}, options.scopedVars, utils.getRangeScopedVars(options.range));
+
     // Prevent changes of original object
     let target = _.cloneDeep(t);

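As a rough illustration (not part of the commit), this is approximately what the merged `scopedVars` holds for a one-hour query range, using the shape of `getRangeScopedVars` added further down in this diff:

```
// Illustrative sketch only; assumes options.range spans exactly one hour.
const rangeVars = utils.getRangeScopedVars(options.range);
// rangeVars (roughly):
//   __range_ms:     { text: 3600000, value: 3600000 }
//   __range_s:      { text: 3600, value: 3600 }
//   __range:        { text: '1h', value: '1h' }
//   __range_series: { text: 'range_series', value: 'range_series' }

// Object.assign builds a fresh object, so existing scoped variables are kept
// and the caller's options.scopedVars is not mutated.
options.scopedVars = Object.assign({}, options.scopedVars, rangeVars);
```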
@@ -2,6 +2,7 @@ import _ from 'lodash';
 import mocks from '../../test-setup/mocks';
 import { Datasource } from "../module";
 import { zabbixTemplateFormat } from "../datasource";
+import { dateMath } from '@grafana/data';

 describe('ZabbixDatasource', () => {
   let ctx = {};
@@ -41,7 +42,10 @@ describe('ZabbixDatasource', () => {
           item: {filter: ""}
         }
       ],
-      range: {from: 'now-7d', to: 'now'}
+      range: {
+        from: dateMath.parse('now-1h'),
+        to: dateMath.parse('now')
+      }
     };

   it('should return an empty array when no targets are set', (done) => {
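The switch from raw strings to `dateMath.parse()` is needed because the datasource now calls `utils.getRangeScopedVars(options.range)`, which measures the range via `range.to.diff(range.from)`, and plain strings have no `diff()`. A minimal sketch of that assumption:

```
// dateMath.parse() returns moment-like objects, which implement diff():
const range = { from: dateMath.parse('now-1h'), to: dateMath.parse('now') };
range.to.diff(range.from); // roughly 3600000 ms
```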
@@ -59,7 +63,7 @@ describe('ZabbixDatasource', () => {
     let ranges = ['now-8d', 'now-169h', 'now-1M', 'now-1y'];

     _.forEach(ranges, range => {
-      ctx.options.range.from = range;
+      ctx.options.range.from = dateMath.parse(range);
       ctx.ds.queryNumericData = jest.fn();
       ctx.ds.query(ctx.options);

@@ -76,7 +80,7 @@ describe('ZabbixDatasource', () => {
     let ranges = ['now-7d', 'now-168h', 'now-1h', 'now-30m', 'now-30s'];

     _.forEach(ranges, range => {
-      ctx.options.range.from = range;
+      ctx.options.range.from = dateMath.parse(range);
       ctx.ds.queryNumericData = jest.fn();
       ctx.ds.query(ctx.options);

@@ -108,10 +112,7 @@ describe('ZabbixDatasource', () => {
       }
     ]));

-    ctx.options = {
-      range: {from: 'now-1h', to: 'now'},
-      targets: [
-        {
+    ctx.options.targets = [{
       group: {filter: ""},
       host: {filter: "Zabbix server"},
       application: {filter: ""},
@@ -123,9 +124,7 @@ describe('ZabbixDatasource', () => {
       options: {
         skipEmptyValues: false
       }
-      }
-    ],
-  };
+    }];
   });

   it('should return data in table format', (done) => {
@@ -11,6 +11,7 @@

 import _ from 'lodash';
 import * as utils from './utils';
+import * as c from './constants';

 const POINT_VALUE = 0;
 const POINT_TIMESTAMP = 1;
@@ -94,11 +95,15 @@ function groupBy(datapoints, interval, groupByCallback) {
   }));
 }

-function groupBy_perf(datapoints, interval, groupByCallback) {
+export function groupBy_perf(datapoints, interval, groupByCallback) {
   if (datapoints.length === 0) {
     return [];
   }

+  if (interval === c.RANGE_VARIABLE_VALUE) {
+    return groupByRange(datapoints, groupByCallback);
+  }
+
   let ms_interval = utils.parseInterval(interval);
   let grouped_series = [];
   let frame_values = [];
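A hedged usage sketch of the new dispatch: when the interval equals the `range_series` sentinel, the whole series is consolidated in one pass instead of being bucketed. The `avg` callback below is an illustrative stand-in for the plugin's aggregation functions:

```
// Datapoints are [value, timestamp] pairs.
const datapoints = [[2, 1000], [4, 2000], [9, 3000]];
const avg = values => values.reduce((a, b) => a + b, 0) / values.length;

groupBy_perf(datapoints, '5s', avg);           // normal interval bucketing
groupBy_perf(datapoints, 'range_series', avg); // delegates to groupByRange()
```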
@@ -132,6 +137,19 @@ function groupBy_perf(datapoints, interval, groupByCallback) {
   return grouped_series;
 }

+export function groupByRange(datapoints, groupByCallback) {
+  const frame_values = [];
+  const frame_start = datapoints[0][POINT_TIMESTAMP];
+  const frame_end = datapoints[datapoints.length - 1][POINT_TIMESTAMP];
+  let point;
+  for (let i = 0; i < datapoints.length; i++) {
+    point = datapoints[i];
+    frame_values.push(point[POINT_VALUE]);
+  }
+  const frame_value = groupByCallback(frame_values);
+  return [[frame_value, frame_start], [frame_value, frame_end]];
+}
+
 /**
  * Summarize set of time series into one.
  * @param {datapoints[]} timeseries array of time series
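For the same illustrative series, `groupByRange` collapses everything into a single aggregated value and returns it as two points (first and last timestamps), so the result still draws as a flat line across the panel:

```
groupByRange([[2, 1000], [4, 2000], [9, 3000]], avg);
// -> [[5, 1000], [5, 3000]]   (avg of 2, 4, 9 is 5)
```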
@@ -495,6 +513,7 @@ const exportedFunctions = {
   downsample,
   groupBy,
   groupBy_perf,
+  groupByRange,
   sumSeries,
   scale,
   offset,
@@ -1,5 +1,7 @@
 import _ from 'lodash';
 import moment from 'moment';
+import kbn from 'grafana/app/core/utils/kbn';
+import * as c from './constants';

 /**
  * Expand Zabbix item name
@@ -141,6 +143,18 @@ export function isTemplateVariable(str, templateVariables) {
   }
 }

+export function getRangeScopedVars(range) {
+  const msRange = range.to.diff(range.from);
+  const sRange = Math.round(msRange / 1000);
+  const regularRange = kbn.secondsToHms(msRange / 1000);
+  return {
+    __range_ms: { text: msRange, value: msRange },
+    __range_s: { text: sRange, value: sRange },
+    __range: { text: regularRange, value: regularRange },
+    __range_series: { text: c.RANGE_VARIABLE_VALUE, value: c.RANGE_VARIABLE_VALUE },
+  };
+}
+
 export function buildRegex(str) {
   var matches = str.match(regexPattern);
   var pattern = matches[1];
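A minimal usage sketch, assuming the moment-style range objects Grafana passes as `options.range` (the one-hour figures and the exact `secondsToHms` output are illustrative):

```
import moment from 'moment';

const range = {
  from: moment('2019-01-01T00:00:00Z'),
  to: moment('2019-01-01T01:00:00Z'),
};

const vars = getRangeScopedVars(range);
// vars.__range_ms.value     -> 3600000
// vars.__range_s.value      -> 3600
// vars.__range.value        -> '1h'  (via kbn.secondsToHms)
// vars.__range_series.value -> 'range_series'
```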
@@ -45,6 +45,7 @@ jest.mock('grafana/app/core/utils/datemath', () => {
 jest.mock('grafana/app/core/utils/kbn', () => {
   return {
     round_interval: n => n,
+    secondsToHms: n => n + 'ms'
   };
 }, {virtual: true});
