Merge branch 'metric-functions'
dist/datasource-zabbix/dataProcessor.js (vendored, 13 lines changed)

@@ -3,7 +3,7 @@
 System.register(['lodash', './utils', './timeseries'], function (_export, _context) {
   "use strict";

-  var _, utils, ts, downsampleSeries, groupBy, groupBy_exported, sumSeries, delta, rate, scale, SUM, COUNT, AVERAGE, MIN, MAX, MEDIAN, metricFunctions, aggregationFunctions;
+  var _, utils, ts, downsampleSeries, groupBy, groupBy_exported, sumSeries, delta, rate, scale, simpleMovingAverage, expMovingAverage, SUM, COUNT, AVERAGE, MIN, MAX, MEDIAN, metricFunctions, aggregationFunctions;

   function limit(order, n, orderByFunc, timeseries) {
     var orderByCallback = aggregationFunctions[orderByFunc];
@@ -106,6 +106,14 @@ System.register(['lodash', './utils', './timeseries'], function (_export, _context) {
       return ts.scale_perf(datapoints, factor);
     };

+    simpleMovingAverage = function simpleMovingAverage(n, datapoints) {
+      return ts.simpleMovingAverage(datapoints, n);
+    };
+
+    expMovingAverage = function expMovingAverage(a, datapoints) {
+      return ts.expMovingAverage(datapoints, a);
+    };
+
     SUM = ts.SUM;
     COUNT = ts.COUNT;
     AVERAGE = ts.AVERAGE;
@@ -117,7 +125,10 @@ System.register(['lodash', './utils', './timeseries'], function (_export, _context) {
       scale: scale,
       delta: delta,
       rate: rate,
+      movingAverage: simpleMovingAverage,
+      exponentialMovingAverage: expMovingAverage,
       aggregateBy: aggregateByWrapper,
       // Predefined aggs
       average: _.partial(aggregateWrapper, AVERAGE),
       min: _.partial(aggregateWrapper, MIN),
       max: _.partial(aggregateWrapper, MAX),
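These wrappers flip the argument order: ts.simpleMovingAverage(datapoints, n) takes the series first, while metricFunctions entries take user parameters first so they can be bound with _.partial. A minimal sketch of how such an entry becomes a unary datapoints transform; the sequence helper's implementation here is an assumption modeled on its use in datasource.js below:

    var _ = require('lodash');

    // Stand-in for ts.simpleMovingAverage: series first, window size last.
    function tsSimpleMovingAverage(datapoints, n) { return datapoints; /* stub */ }

    // Wrapper shape from this diff: user parameter first, datapoints last.
    var simpleMovingAverage = function (n, datapoints) {
      return tsSimpleMovingAverage(datapoints, n);
    };

    // Binding the parameter yields a datapoints -> datapoints function:
    var movingAverage10 = _.partial(simpleMovingAverage, 10);

    // Assumed left-to-right composition (sequence is used, not defined, in this diff):
    function sequence(funcs) {
      return function (datapoints) {
        return funcs.reduce(function (acc, fn) { return fn(acc); }, datapoints);
      };
    }

    sequence([movingAverage10])([[1, 1000], [2, 2000], [3, 3000]]);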
dist/datasource-zabbix/dataProcessor.js.map (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
dist/datasource-zabbix/datasource.js (vendored, 6 lines changed)

@@ -315,7 +315,7 @@ System.register(['lodash', 'app/core/utils/datemath', './utils', './migrations',
       var useTrends = _this.isUseTrends(timeRange);

       // Metrics or Text query mode
-      if (target.mode === c.MODE_METRICS || target.mode === c.MODE_TEXT || target.mode === c.MODE_ITEMID) {
+      if (!target.mode || target.mode === c.MODE_METRICS || target.mode === c.MODE_TEXT || target.mode === c.MODE_ITEMID) {
         // Migrate old targets
         target = migrations.migrate(target);
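The added !target.mode clause covers targets saved before the mode field existed: their mode is undefined, so the old condition skipped them entirely. A sketch of the case (field values hypothetical):

    // A target persisted by an older plugin version: no mode field at all.
    var legacyTarget = { host: { filter: 'backend01' }, item: { filter: 'CPU user time' } };

    // Old check: undefined matches none of the c.MODE_* constants, so the
    // target was never queried. The new check routes it into the metrics
    // branch, where migrations.migrate() upgrades it in place.
    if (!legacyTarget.mode) {
      // handled as a metrics-mode query
    }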
@@ -426,10 +426,10 @@ System.register(['lodash', 'app/core/utils/datemath', './utils', './migrations',
       var aliasFunctions = bindFunctionDefs(target.functions, 'Alias');

       // Apply transformation functions
-      timeseries_data = _.map(timeseries_data, function (timeseries) {
+      timeseries_data = _.cloneDeep(_.map(timeseries_data, function (timeseries) {
         timeseries.datapoints = sequence(transformFunctions)(timeseries.datapoints);
         return timeseries;
-      });
+      }));

       // Apply filter functions
       if (filterFunctions.length) {
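The _.cloneDeep wrap guards against shared state: the caching layer can hand the same series objects to every target that issued an equal query, and the transform pipeline reassigns timeseries.datapoints on those objects in place. The same guard appears in zabbixDBConnector.js below. A minimal sketch of the hazard, with illustrative names:

    var _ = require('lodash');

    // One cached series object handed to two consumers.
    var cached = { target: 'cpu', datapoints: [[1, 1000], [2, 2000]] };
    var panelA = [cached];
    var panelB = [cached];

    // An in-place transform, like sequence(transformFunctions) above:
    panelA.forEach(function (s) {
      s.datapoints = s.datapoints.map(function (p) { return [p[0] * 10, p[1]]; });
    });

    panelB[0].datapoints[0][0];  // 10: panelB sees panelA's transform

    // Deep-cloning gives each consumer its own copy to mutate:
    var isolated = _.cloneDeep([cached]);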
dist/datasource-zabbix/datasource.js.map (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
dist/datasource-zabbix/metricFunctions.js (vendored, 14 lines changed)

@@ -111,6 +111,20 @@ System.register(['lodash', 'jquery'], function (_export, _context) {
     defaultParams: []
   });

+  addFuncDef({
+    name: 'movingAverage',
+    category: 'Transform',
+    params: [{ name: 'factor', type: 'int', options: [6, 10, 60, 100, 600] }],
+    defaultParams: [10]
+  });
+
+  addFuncDef({
+    name: 'exponentialMovingAverage',
+    category: 'Transform',
+    params: [{ name: 'smoothing', type: 'float', options: [6, 10, 60, 100, 600] }],
+    defaultParams: [0.2]
+  });
+
   // Aggregate

   addFuncDef({
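Both definitions follow the existing addFuncDef shape: an editor category, typed parameters with suggested options, and defaults inserted when the function is added to a query. The smoothing parameter reuses the integer option list even though it is typed float; that fits expMovingAverage in timeseries.js below, which uses values in (0, 1] as the smoothing factor a directly and treats values above 1 as a window size:

    // How expMovingAverage interprets the parameter (see timeseries.js below):
    function smoothingFactor(param) {
      return param > 1 ? 2 / (param + 1) : param;
    }

    smoothingFactor(0.2);  // 0.2       (used as a directly)
    smoothingFactor(10);   // 0.1818... (derived from window size 10)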
File diff suppressed because one or more lines are too long
@@ -69,7 +69,7 @@ describe('ZabbixDatasource', () => {

       // Check that useTrends options is true
       expect(ctx.ds.queryNumericData)
-        .to.have.been.calledWith(defined, defined, true);
+        .to.have.been.calledWith(defined, defined, true, sinon.match.any);
     });

     done();
@@ -85,7 +85,7 @@ describe('ZabbixDatasource', () => {

       // Check that useTrends options is false
       expect(ctx.ds.queryNumericData)
-        .to.have.been.calledWith(defined, defined, false);
+        .to.have.been.calledWith(defined, defined, false, sinon.match.any);
     });
     done();
   });
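queryNumericData now receives a fourth argument, so these assertions pin the first three arguments and accept anything in the new slot via sinon.match.any, the stock sinon wildcard matcher. A standalone sketch of how it behaves:

    var sinon = require('sinon');

    var spy = sinon.spy();
    spy('groups', 'hosts', true, { groupFunctions: [] });  // 4th argument illustrative

    // Passes: first three arguments pinned, fourth may be anything.
    sinon.assert.calledWith(spy, 'groups', 'hosts', true, sinon.match.any);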
dist/datasource-zabbix/timeseries.js (vendored, 93 lines changed)

@@ -209,6 +209,97 @@ System.register(['lodash', './utils'], function (_export, _context) {
     return newSeries;
   }

+  function simpleMovingAverage(datapoints, n) {
+    var sma = [];
+    var w_sum = void 0;
+    var w_avg = null;
+    var w_count = 0;
+
+    // Initial window
+    for (var j = n; j > 0; j--) {
+      if (datapoints[n - j][POINT_VALUE] !== null) {
+        w_avg += datapoints[n - j][POINT_VALUE];
+        w_count++;
+      }
+    }
+    if (w_count > 0) {
+      w_avg = w_avg / w_count;
+    } else {
+      w_avg = null;
+    }
+    sma.push([w_avg, datapoints[n - 1][POINT_TIMESTAMP]]);
+
+    for (var i = n; i < datapoints.length; i++) {
+      // Insert next value
+      if (datapoints[i][POINT_VALUE] !== null) {
+        w_sum = w_avg * w_count;
+        w_avg = (w_sum + datapoints[i][POINT_VALUE]) / (w_count + 1);
+        w_count++;
+      }
+      // Remove left side point
+      if (datapoints[i - n][POINT_VALUE] !== null) {
+        w_sum = w_avg * w_count;
+        if (w_count > 1) {
+          w_avg = (w_sum - datapoints[i - n][POINT_VALUE]) / (w_count - 1);
+          w_count--;
+        } else {
+          w_avg = null;
+          w_count = 0;
+        }
+      }
+      sma.push([w_avg, datapoints[i][POINT_TIMESTAMP]]);
+    }
+    return sma;
+  }
+
+  function expMovingAverage(datapoints, n) {
+    var ema = [datapoints[0]];
+    var ema_prev = datapoints[0][POINT_VALUE];
+    var ema_cur = void 0;
+    var a = void 0;
+
+    if (n > 1) {
+      // Calculate a from window size
+      a = 2 / (n + 1);
+
+      // Initial window, use simple moving average
+      var w_avg = null;
+      var w_count = 0;
+      for (var j = n; j > 0; j--) {
+        if (datapoints[n - j][POINT_VALUE] !== null) {
+          w_avg += datapoints[n - j][POINT_VALUE];
+          w_count++;
+        }
+      }
+      if (w_count > 0) {
+        w_avg = w_avg / w_count;
+        // Actually, we should set timestamp from datapoints[n-1] and start calculation of EMA from n.
+        // But in order to start EMA from first point (not from Nth) we should expand time range and request N additional
+        // points outside left side of range. We can't do that, so this trick is used for pretty view of first N points.
+        // We calculate AVG for first N points, but then start from 2nd point, not from Nth. In general, it means we
+        // assume that previous N values (0-N, 0-(N-1), ..., 0-1) have the same average value as a first N values.
+        ema = [[w_avg, datapoints[0][POINT_TIMESTAMP]]];
+        ema_prev = w_avg;
+        n = 1;
+      }
+    } else {
+      // Use predefined a and start from 1st point (use it as initial EMA value)
+      a = n;
+      n = 1;
+    }
+
+    for (var i = n; i < datapoints.length; i++) {
+      if (datapoints[i][POINT_VALUE] !== null) {
+        ema_cur = a * datapoints[i][POINT_VALUE] + (1 - a) * ema_prev;
+        ema_prev = ema_cur;
+        ema.push([ema_cur, datapoints[i][POINT_TIMESTAMP]]);
+      } else {
+        ema.push([null, datapoints[i][POINT_TIMESTAMP]]);
+      }
+    }
+    return ema;
+  }
+
   function COUNT(values) {
     return values.length;
   }
@@ -351,6 +442,8 @@ System.register(['lodash', './utils'], function (_export, _context) {
     scale_perf: scale_perf,
     delta: delta,
     rate: rate,
+    simpleMovingAverage: simpleMovingAverage,
+    expMovingAverage: expMovingAverage,
     SUM: SUM,
     COUNT: COUNT,
     AVERAGE: AVERAGE,
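A hand-worked check of both functions on a toy series (values 1 through 4, one point per second), following the code above; the expected outputs were computed by tracing the loops:

    var datapoints = [[1, 1000], [2, 2000], [3, 3000], [4, 4000]];

    // Window of 2: the first output is the initial-window average stamped at
    // the window's right edge, then the window slides one point at a time.
    simpleMovingAverage(datapoints, 2);
    // => [[1.5, 2000], [2.5, 3000], [3.5, 4000]]

    // n <= 1 is taken as the smoothing factor a itself (the else branch above):
    expMovingAverage(datapoints, 0.5);
    // => [[1, 1000], [1.5, 2000], [2.25, 3000], [3.125, 4000]]

    // n > 1 derives a = 2 / (n + 1) and seeds the EMA with the plain average
    // of the first n values, stamped at the first timestamp:
    expMovingAverage(datapoints, 2);
    // a = 2/3, seed = avg(1, 2) = 1.5
    // => [[1.5, 1000], [1.833..., 2000], [2.611..., 3000], [3.537..., 4000]]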
dist/datasource-zabbix/timeseries.js.map (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
dist/datasource-zabbix/zabbixDBConnector.js (vendored, 4 lines changed)

@@ -157,7 +157,6 @@ System.register(['angular', 'lodash'], function (_export, _context) {
     var hosts = _.uniqBy(_.flatten(_.map(items, 'hosts')), 'hostid'); //uniqBy is needed to deduplicate
     var grafanaSeries = _.map(time_series, function (series) {
       var itemid = series.name;
-      var datapoints = series.points;
       var item = _.find(items, { 'itemid': itemid });
       var alias = item.name;
       if (_.keys(hosts).length > 1 && addHostName) {
@@ -165,6 +164,9 @@ System.register(['angular', 'lodash'], function (_export, _context) {
         var host = _.find(hosts, { 'hostid': item.hostid });
         alias = host.name + ": " + alias;
       }
+      // zabbixCachingProxy deduplicates requests and returns one time series for equal queries.
+      // Clone is needed to prevent changing of series object shared between all targets.
+      var datapoints = _.cloneDeep(series.points);
       return {
         target: alias,
         datapoints: datapoints
File diff suppressed because one or more lines are too long
dist/test/datasource-zabbix/dataProcessor.js (vendored, 9 lines changed)

@@ -31,6 +31,12 @@ var rate = _timeseries2.default.rate;
 var scale = function scale(factor, datapoints) {
   return _timeseries2.default.scale_perf(datapoints, factor);
 };
+var simpleMovingAverage = function simpleMovingAverage(n, datapoints) {
+  return _timeseries2.default.simpleMovingAverage(datapoints, n);
+};
+var expMovingAverage = function expMovingAverage(a, datapoints) {
+  return _timeseries2.default.expMovingAverage(datapoints, a);
+};

 var SUM = _timeseries2.default.SUM;
 var COUNT = _timeseries2.default.COUNT;
@@ -121,7 +127,10 @@ var metricFunctions = {
   scale: scale,
   delta: delta,
   rate: rate,
+  movingAverage: simpleMovingAverage,
+  exponentialMovingAverage: expMovingAverage,
   aggregateBy: aggregateByWrapper,
   // Predefined aggs
   average: _lodash2.default.partial(aggregateWrapper, AVERAGE),
   min: _lodash2.default.partial(aggregateWrapper, MIN),
   max: _lodash2.default.partial(aggregateWrapper, MAX),
dist/test/datasource-zabbix/datasource.js (vendored, 6 lines changed)

@@ -168,7 +168,7 @@ var ZabbixAPIDatasource = function () {
       var useTrends = _this.isUseTrends(timeRange);

       // Metrics or Text query mode
-      if (target.mode === c.MODE_METRICS || target.mode === c.MODE_TEXT || target.mode === c.MODE_ITEMID) {
+      if (!target.mode || target.mode === c.MODE_METRICS || target.mode === c.MODE_TEXT || target.mode === c.MODE_ITEMID) {
         // Migrate old targets
         target = migrations.migrate(target);
@@ -289,10 +289,10 @@ var ZabbixAPIDatasource = function () {
       var aliasFunctions = bindFunctionDefs(target.functions, 'Alias');

       // Apply transformation functions
-      timeseries_data = _lodash2.default.map(timeseries_data, function (timeseries) {
+      timeseries_data = _lodash2.default.cloneDeep(_lodash2.default.map(timeseries_data, function (timeseries) {
         timeseries.datapoints = sequence(transformFunctions)(timeseries.datapoints);
         return timeseries;
-      });
+      }));

       // Apply filter functions
       if (filterFunctions.length) {
dist/test/datasource-zabbix/metricFunctions.js (vendored, 14 lines changed)

@@ -74,6 +74,20 @@ addFuncDef({
   defaultParams: []
 });

+addFuncDef({
+  name: 'movingAverage',
+  category: 'Transform',
+  params: [{ name: 'factor', type: 'int', options: [6, 10, 60, 100, 600] }],
+  defaultParams: [10]
+});
+
+addFuncDef({
+  name: 'exponentialMovingAverage',
+  category: 'Transform',
+  params: [{ name: 'smoothing', type: 'float', options: [6, 10, 60, 100, 600] }],
+  defaultParams: [0.2]
+});
+
 // Aggregate

 addFuncDef({
@@ -84,7 +84,7 @@ describe('ZabbixDatasource', function () {
     ctx.ds.query(ctx.options);

     // Check that useTrends options is true
-    expect(ctx.ds.queryNumericData).to.have.been.calledWith(defined, defined, true);
+    expect(ctx.ds.queryNumericData).to.have.been.calledWith(defined, defined, true, _sinon2.default.match.any);
   });

   done();
@@ -99,7 +99,7 @@ describe('ZabbixDatasource', function () {
     ctx.ds.query(ctx.options);

     // Check that useTrends options is false
-    expect(ctx.ds.queryNumericData).to.have.been.calledWith(defined, defined, false);
+    expect(ctx.ds.queryNumericData).to.have.been.calledWith(defined, defined, false, _sinon2.default.match.any);
   });
   done();
 });
dist/test/datasource-zabbix/timeseries.js (vendored, 93 lines changed)

@@ -234,6 +234,97 @@ function rate(datapoints) {
   return newSeries;
 }

+function simpleMovingAverage(datapoints, n) {
+  var sma = [];
+  var w_sum = void 0;
+  var w_avg = null;
+  var w_count = 0;
+
+  // Initial window
+  for (var j = n; j > 0; j--) {
+    if (datapoints[n - j][POINT_VALUE] !== null) {
+      w_avg += datapoints[n - j][POINT_VALUE];
+      w_count++;
+    }
+  }
+  if (w_count > 0) {
+    w_avg = w_avg / w_count;
+  } else {
+    w_avg = null;
+  }
+  sma.push([w_avg, datapoints[n - 1][POINT_TIMESTAMP]]);
+
+  for (var i = n; i < datapoints.length; i++) {
+    // Insert next value
+    if (datapoints[i][POINT_VALUE] !== null) {
+      w_sum = w_avg * w_count;
+      w_avg = (w_sum + datapoints[i][POINT_VALUE]) / (w_count + 1);
+      w_count++;
+    }
+    // Remove left side point
+    if (datapoints[i - n][POINT_VALUE] !== null) {
+      w_sum = w_avg * w_count;
+      if (w_count > 1) {
+        w_avg = (w_sum - datapoints[i - n][POINT_VALUE]) / (w_count - 1);
+        w_count--;
+      } else {
+        w_avg = null;
+        w_count = 0;
+      }
+    }
+    sma.push([w_avg, datapoints[i][POINT_TIMESTAMP]]);
+  }
+  return sma;
+}
+
+function expMovingAverage(datapoints, n) {
+  var ema = [datapoints[0]];
+  var ema_prev = datapoints[0][POINT_VALUE];
+  var ema_cur = void 0;
+  var a = void 0;
+
+  if (n > 1) {
+    // Calculate a from window size
+    a = 2 / (n + 1);
+
+    // Initial window, use simple moving average
+    var w_avg = null;
+    var w_count = 0;
+    for (var j = n; j > 0; j--) {
+      if (datapoints[n - j][POINT_VALUE] !== null) {
+        w_avg += datapoints[n - j][POINT_VALUE];
+        w_count++;
+      }
+    }
+    if (w_count > 0) {
+      w_avg = w_avg / w_count;
+      // Actually, we should set timestamp from datapoints[n-1] and start calculation of EMA from n.
+      // But in order to start EMA from first point (not from Nth) we should expand time range and request N additional
+      // points outside left side of range. We can't do that, so this trick is used for pretty view of first N points.
+      // We calculate AVG for first N points, but then start from 2nd point, not from Nth. In general, it means we
+      // assume that previous N values (0-N, 0-(N-1), ..., 0-1) have the same average value as a first N values.
+      ema = [[w_avg, datapoints[0][POINT_TIMESTAMP]]];
+      ema_prev = w_avg;
+      n = 1;
+    }
+  } else {
+    // Use predefined a and start from 1st point (use it as initial EMA value)
+    a = n;
+    n = 1;
+  }
+
+  for (var i = n; i < datapoints.length; i++) {
+    if (datapoints[i][POINT_VALUE] !== null) {
+      ema_cur = a * datapoints[i][POINT_VALUE] + (1 - a) * ema_prev;
+      ema_prev = ema_cur;
+      ema.push([ema_cur, datapoints[i][POINT_TIMESTAMP]]);
+    } else {
+      ema.push([null, datapoints[i][POINT_TIMESTAMP]]);
+    }
+  }
+  return ema;
+}
+
 function COUNT(values) {
   return values.length;
 }
@@ -367,6 +458,8 @@ var exportedFunctions = {
   scale_perf: scale_perf,
   delta: delta,
   rate: rate,
+  simpleMovingAverage: simpleMovingAverage,
+  expMovingAverage: expMovingAverage,
   SUM: SUM,
   COUNT: COUNT,
   AVERAGE: AVERAGE,
@@ -195,7 +195,6 @@ function convertGrafanaTSResponse(time_series, items, addHostName) {
   var hosts = _lodash2.default.uniqBy(_lodash2.default.flatten(_lodash2.default.map(items, 'hosts')), 'hostid'); //uniqBy is needed to deduplicate
   var grafanaSeries = _lodash2.default.map(time_series, function (series) {
     var itemid = series.name;
-    var datapoints = series.points;
     var item = _lodash2.default.find(items, { 'itemid': itemid });
     var alias = item.name;
     if (_lodash2.default.keys(hosts).length > 1 && addHostName) {
@@ -203,6 +202,9 @@ function convertGrafanaTSResponse(time_series, items, addHostName) {
       var host = _lodash2.default.find(hosts, { 'hostid': item.hostid });
       alias = host.name + ": " + alias;
     }
+    // zabbixCachingProxy deduplicates requests and returns one time series for equal queries.
+    // Clone is needed to prevent changing of series object shared between all targets.
+    var datapoints = _lodash2.default.cloneDeep(series.points);
     return {
       target: alias,
       datapoints: datapoints
@@ -9,6 +9,8 @@ let sumSeries = ts.sumSeries;
 let delta = ts.delta;
 let rate = ts.rate;
 let scale = (factor, datapoints) => ts.scale_perf(datapoints, factor);
+let simpleMovingAverage = (n, datapoints) => ts.simpleMovingAverage(datapoints, n);
+let expMovingAverage = (a, datapoints) => ts.expMovingAverage(datapoints, a);

 let SUM = ts.SUM;
 let COUNT = ts.COUNT;
@@ -102,7 +104,10 @@ let metricFunctions = {
   scale: scale,
   delta: delta,
   rate: rate,
+  movingAverage: simpleMovingAverage,
+  exponentialMovingAverage: expMovingAverage,
   aggregateBy: aggregateByWrapper,
   // Predefined aggs
   average: _.partial(aggregateWrapper, AVERAGE),
   min: _.partial(aggregateWrapper, MIN),
   max: _.partial(aggregateWrapper, MAX),
@@ -113,7 +113,8 @@ class ZabbixAPIDatasource {
     let useTrends = this.isUseTrends(timeRange);

     // Metrics or Text query mode
-    if (target.mode === c.MODE_METRICS || target.mode === c.MODE_TEXT || target.mode === c.MODE_ITEMID) {
+    if (!target.mode || target.mode === c.MODE_METRICS ||
+        target.mode === c.MODE_TEXT || target.mode === c.MODE_ITEMID) {
       // Migrate old targets
       target = migrations.migrate(target);
@@ -216,10 +217,10 @@ class ZabbixAPIDatasource {
     let aliasFunctions = bindFunctionDefs(target.functions, 'Alias');

     // Apply transformation functions
-    timeseries_data = _.map(timeseries_data, timeseries => {
+    timeseries_data = _.cloneDeep(_.map(timeseries_data, timeseries => {
       timeseries.datapoints = sequence(transformFunctions)(timeseries.datapoints);
       return timeseries;
-    });
+    }));

     // Apply filter functions
     if (filterFunctions.length) {
@@ -58,6 +58,24 @@ addFuncDef({
   defaultParams: [],
 });

+addFuncDef({
+  name: 'movingAverage',
+  category: 'Transform',
+  params: [
+    { name: 'factor', type: 'int', options: [6, 10, 60, 100, 600] }
+  ],
+  defaultParams: [10],
+});
+
+addFuncDef({
+  name: 'exponentialMovingAverage',
+  category: 'Transform',
+  params: [
+    { name: 'smoothing', type: 'float', options: [6, 10, 60, 100, 600] }
+  ],
+  defaultParams: [0.2],
+});
+
 // Aggregate

 addFuncDef({
@@ -69,7 +69,7 @@ describe('ZabbixDatasource', () => {

       // Check that useTrends options is true
       expect(ctx.ds.queryNumericData)
-        .to.have.been.calledWith(defined, defined, true);
+        .to.have.been.calledWith(defined, defined, true, sinon.match.any);
     });

     done();
@@ -85,7 +85,7 @@ describe('ZabbixDatasource', () => {

       // Check that useTrends options is false
       expect(ctx.ds.queryNumericData)
-        .to.have.been.calledWith(defined, defined, false);
+        .to.have.been.calledWith(defined, defined, false, sinon.match.any);
     });
     done();
   });
@@ -226,6 +226,97 @@ function rate(datapoints) {
   return newSeries;
 }

+function simpleMovingAverage(datapoints, n) {
+  let sma = [];
+  let w_sum;
+  let w_avg = null;
+  let w_count = 0;
+
+  // Initial window
+  for (let j = n; j > 0; j--) {
+    if (datapoints[n - j][POINT_VALUE] !== null) {
+      w_avg += datapoints[n - j][POINT_VALUE];
+      w_count++;
+    }
+  }
+  if (w_count > 0) {
+    w_avg = w_avg / w_count;
+  } else {
+    w_avg = null;
+  }
+  sma.push([w_avg, datapoints[n - 1][POINT_TIMESTAMP]]);
+
+  for (let i = n; i < datapoints.length; i++) {
+    // Insert next value
+    if (datapoints[i][POINT_VALUE] !== null) {
+      w_sum = w_avg * w_count;
+      w_avg = (w_sum + datapoints[i][POINT_VALUE]) / (w_count + 1);
+      w_count++;
+    }
+    // Remove left side point
+    if (datapoints[i - n][POINT_VALUE] !== null) {
+      w_sum = w_avg * w_count;
+      if (w_count > 1) {
+        w_avg = (w_sum - datapoints[i - n][POINT_VALUE]) / (w_count - 1);
+        w_count--;
+      } else {
+        w_avg = null;
+        w_count = 0;
+      }
+    }
+    sma.push([w_avg, datapoints[i][POINT_TIMESTAMP]]);
+  }
+  return sma;
+}
+
+function expMovingAverage(datapoints, n) {
+  let ema = [datapoints[0]];
+  let ema_prev = datapoints[0][POINT_VALUE];
+  let ema_cur;
+  let a;
+
+  if (n > 1) {
+    // Calculate a from window size
+    a = 2 / (n + 1);
+
+    // Initial window, use simple moving average
+    let w_avg = null;
+    let w_count = 0;
+    for (let j = n; j > 0; j--) {
+      if (datapoints[n - j][POINT_VALUE] !== null) {
+        w_avg += datapoints[n - j][POINT_VALUE];
+        w_count++;
+      }
+    }
+    if (w_count > 0) {
+      w_avg = w_avg / w_count;
+      // Actually, we should set timestamp from datapoints[n-1] and start calculation of EMA from n.
+      // But in order to start EMA from first point (not from Nth) we should expand time range and request N additional
+      // points outside left side of range. We can't do that, so this trick is used for pretty view of first N points.
+      // We calculate AVG for first N points, but then start from 2nd point, not from Nth. In general, it means we
+      // assume that previous N values (0-N, 0-(N-1), ..., 0-1) have the same average value as a first N values.
+      ema = [[w_avg, datapoints[0][POINT_TIMESTAMP]]];
+      ema_prev = w_avg;
+      n = 1;
+    }
+  } else {
+    // Use predefined a and start from 1st point (use it as initial EMA value)
+    a = n;
+    n = 1;
+  }
+
+  for (let i = n; i < datapoints.length; i++) {
+    if (datapoints[i][POINT_VALUE] !== null) {
+      ema_cur = a * datapoints[i][POINT_VALUE] + (1 - a) * ema_prev;
+      ema_prev = ema_cur;
+      ema.push([ema_cur, datapoints[i][POINT_TIMESTAMP]]);
+    } else {
+      ema.push([null, datapoints[i][POINT_TIMESTAMP]]);
+    }
+  }
+  return ema;
+}
+
 function COUNT(values) {
   return values.length;
 }
@@ -359,6 +450,8 @@ const exportedFunctions = {
  scale_perf,
  delta,
  rate,
+  simpleMovingAverage,
+  expMovingAverage,
  SUM,
  COUNT,
  AVERAGE,
@@ -171,13 +171,15 @@ function convertGrafanaTSResponse(time_series, items, addHostName) {
   var hosts = _.uniqBy(_.flatten(_.map(items, 'hosts')), 'hostid'); //uniqBy is needed to deduplicate
   let grafanaSeries = _.map(time_series, series => {
     let itemid = series.name;
-    let datapoints = series.points;
     var item = _.find(items, {'itemid': itemid});
     var alias = item.name;
     if (_.keys(hosts).length > 1 && addHostName) { //only when actual multi hosts selected
       var host = _.find(hosts, {'hostid': item.hostid});
       alias = host.name + ": " + alias;
     }
+    // zabbixCachingProxy deduplicates requests and returns one time series for equal queries.
+    // Clone is needed to prevent changing of series object shared between all targets.
+    let datapoints = _.cloneDeep(series.points);
     return {
       target: alias,
       datapoints: datapoints