fix aggregateBy func, fix #498

dist/datasource-zabbix/dataProcessor.js (vendored) | 4
@@ -65,8 +65,10 @@ System.register(['lodash', './utils', './timeseries'], function (_export, _context) {
       function aggregateByWrapper(interval, aggregateFunc, datapoints) {
         // Flatten all points in frame and then just use groupBy()
         var flattenedPoints = _.flatten(datapoints, true);
+        // groupBy_perf works with sorted series only
+        var sortedPoints = ts.sortByTime(flattenedPoints);
         var groupByCallback = aggregationFunctions[aggregateFunc];
-        return groupBy(flattenedPoints, interval, groupByCallback);
+        return groupBy(sortedPoints, interval, groupByCallback);
       }

       function aggregateWrapper(groupByCallback, interval, datapoints) {
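
Why the sort is needed: _.flatten() only concatenates the per-series point arrays, so the timestamps in the flattened list restart at every series boundary, while the grouping step expects time-ascending input. A minimal standalone sketch (illustration only, using the sample series from the spec added in this commit and assuming lodash is available):

import _ from 'lodash';

// Two series of [value, timestamp] points, the same data as in dataProcessor.spec.js.
const datapoints = [
  [[10, 1500000000000], [2, 1500000001000], [7, 1500000002000], [1, 1500000003000]],
  [[9, 1500000000000], [3, 1500000001000], [4, 1500000002000], [8, 1500000003000]],
];

// A shallow flatten just concatenates the series, so the last four digits of the
// timestamps go 0000, 1000, 2000, 3000 and then jump back to 0000.
const flattened = _.flatten(datapoints);

// Sorting by the timestamp element (index 1) restores ascending order,
// which is what the grouping step relies on.
const sorted = _.sortBy(flattened, (point) => point[1]);
console.log(sorted.map((point) => point[1]));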

dist/datasource-zabbix/dataProcessor.js.map (vendored) | 2
File diff suppressed because one or more lines are too long

dist/datasource-zabbix/specs/dataProcessor.spec.js (vendored, new file) | 51
@@ -0,0 +1,51 @@
+import _ from 'lodash';
+import dataProcessor from '../dataProcessor';
+
+describe('dataProcessor', () => {
+  let ctx = {};
+
+  beforeEach(() => {
+    ctx.datapoints = [
+      [[10, 1500000000000], [2, 1500000001000], [7, 1500000002000], [1, 1500000003000]],
+      [[9, 1500000000000], [3, 1500000001000], [4, 1500000002000], [8, 1500000003000]],
+    ];
+  });
+
+  describe('When apply groupBy() functions', () => {
+    it('should return series average', () => {
+      let aggregateBy = dataProcessor.metricFunctions['groupBy'];
+      const avg2s = _.map(ctx.datapoints, (dp) => aggregateBy('2s', 'avg', dp));
+      expect(avg2s).toEqual([
+        [[6, 1500000000000], [4, 1500000002000]],
+        [[6, 1500000000000], [6, 1500000002000]],
+      ]);
+
+      const avg10s = _.map(ctx.datapoints, (dp) => aggregateBy('10s', 'avg', dp));
+      expect(avg10s).toEqual([
+        [[5, 1500000000000]],
+        [[6, 1500000000000]],
+      ]);
+
+      // not aligned
+      const dp = [[10, 1500000001000], [2, 1500000002000], [7, 1500000003000], [1, 1500000004000]];
+      expect(aggregateBy('2s', 'avg', dp)).toEqual([
+        [10, 1500000000000], [4.5, 1500000002000], [1, 1500000004000]
+      ]);
+    });
+  });
+
+  describe('When apply aggregateBy() functions', () => {
+    it('should return series average', () => {
+      let aggregateBy = dataProcessor.metricFunctions['aggregateBy'];
+      const avg1s = aggregateBy('1s', 'avg', ctx.datapoints);
+      expect(avg1s).toEqual([
+        [9.5, 1500000000000], [2.5, 1500000001000], [5.5, 1500000002000], [4.5, 1500000003000]
+      ]);
+
+      const avg10s = aggregateBy('10s', 'avg', ctx.datapoints);
+      expect(avg10s).toEqual([
+        [5.5, 1500000000000]
+      ]);
+    });
+  });
+});
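
The expected values in the groupBy test above can be checked by hand: with a 2s interval each series is bucketed per frame and averaged on its own, so the first series gives (10 + 2) / 2 = 6 and (7 + 1) / 2 = 4 while the second gives (9 + 3) / 2 = 6 and (4 + 8) / 2 = 6; with a 10s interval all four points of a series fall into one frame, giving (10 + 2 + 7 + 1) / 4 = 5 and (9 + 3 + 4 + 8) / 4 = 6. A trivial helper reproduces the same numbers (illustration only, not part of the commit):

// Plain average, only used to re-derive the expected per-frame values.
const avg = (values) => values.reduce((sum, v) => sum + v, 0) / values.length;

avg([10, 2]);       // 6 -> frame starting at 1500000000000
avg([7, 1]);        // 4 -> frame starting at 1500000002000
avg([10, 2, 7, 1]); // 5 -> single 10s frame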

dist/datasource-zabbix/timeseries.js (vendored) | 3
@@ -480,7 +480,8 @@ System.register(['lodash', './utils'], function (_export, _context) {
         MIN: MIN,
         MAX: MAX,
         MEDIAN: MEDIAN,
-        PERCENTIL: PERCENTIL
+        PERCENTIL: PERCENTIL,
+        sortByTime: sortByTime
       };

       _export('default', exportedFunctions);
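
The hunk above adds sortByTime to the functions exported by timeseries.js so that dataProcessor.js can call ts.sortByTime(); the implementation itself sits outside this hunk. A plausible minimal version, assuming points are [value, timestamp] pairs as in the spec data (a hypothetical sketch, not the actual timeseries.js source):

import _ from 'lodash';

// Hypothetical sketch: order a series of [value, timestamp] points by timestamp.
function sortByTime(series) {
  return _.sortBy(series, (point) => point[1]);
}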

dist/datasource-zabbix/timeseries.js.map (vendored) | 2
File diff suppressed because one or more lines are too long

src/datasource-zabbix/dataProcessor.js | 8
@@ -79,9 +79,11 @@ function groupByWrapper(interval, groupFunc, datapoints) {

 function aggregateByWrapper(interval, aggregateFunc, datapoints) {
   // Flatten all points in frame and then just use groupBy()
-  var flattenedPoints = _.flatten(datapoints, true);
-  var groupByCallback = aggregationFunctions[aggregateFunc];
-  return groupBy(flattenedPoints, interval, groupByCallback);
+  const flattenedPoints = _.flatten(datapoints, true);
+  // groupBy_perf works with sorted series only
+  const sortedPoints = ts.sortByTime(flattenedPoints);
+  let groupByCallback = aggregationFunctions[aggregateFunc];
+  return groupBy(sortedPoints, interval, groupByCallback);
 }

 function aggregateWrapper(groupByCallback, interval, datapoints) {
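
The comment added here, "groupBy_perf works with sorted series only", is the core of the fix: the fast grouping path walks the points once and closes a frame as soon as a timestamp crosses the interval boundary, so out-of-order input corrupts the frames. A rough, hypothetical sketch of such a single-pass loop (not the plugin's actual groupBy_perf) illustrates the constraint:

// Single-pass grouping that assumes points are sorted by timestamp (ascending).
// points: array of [value, timestamp]; intervalMs: frame length; aggFunc: e.g. an average.
function groupBySorted(points, intervalMs, aggFunc) {
  const result = [];
  if (!points.length) {
    return result;
  }
  let frameTs = Math.floor(points[0][1] / intervalMs) * intervalMs;
  let frame = [];
  for (const [value, ts] of points) {
    if (ts >= frameTs + intervalMs) {
      // Close the current frame. With unsorted input, points whose timestamps jump
      // backwards never open a new frame and get lumped into whichever frame is open.
      result.push([aggFunc(frame), frameTs]);
      frameTs = Math.floor(ts / intervalMs) * intervalMs;
      frame = [];
    }
    frame.push(value);
  }
  result.push([aggFunc(frame), frameTs]);
  return result;
}

// With the flattened-and-sorted spec data and a 1s interval, an average aggFunc yields
// [[9.5, 1500000000000], [2.5, 1500000001000], [5.5, 1500000002000], [4.5, 1500000003000]],
// matching the aggregateBy expectations in the spec.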

src/datasource-zabbix/specs/dataProcessor.spec.js (new file) | 51
@@ -0,0 +1,51 @@
+import _ from 'lodash';
+import dataProcessor from '../dataProcessor';
+
+describe('dataProcessor', () => {
+  let ctx = {};
+
+  beforeEach(() => {
+    ctx.datapoints = [
+      [[10, 1500000000000], [2, 1500000001000], [7, 1500000002000], [1, 1500000003000]],
+      [[9, 1500000000000], [3, 1500000001000], [4, 1500000002000], [8, 1500000003000]],
+    ];
+  });
+
+  describe('When apply groupBy() functions', () => {
+    it('should return series average', () => {
+      let aggregateBy = dataProcessor.metricFunctions['groupBy'];
+      const avg2s = _.map(ctx.datapoints, (dp) => aggregateBy('2s', 'avg', dp));
+      expect(avg2s).toEqual([
+        [[6, 1500000000000], [4, 1500000002000]],
+        [[6, 1500000000000], [6, 1500000002000]],
+      ]);
+
+      const avg10s = _.map(ctx.datapoints, (dp) => aggregateBy('10s', 'avg', dp));
+      expect(avg10s).toEqual([
+        [[5, 1500000000000]],
+        [[6, 1500000000000]],
+      ]);
+
+      // not aligned
+      const dp = [[10, 1500000001000], [2, 1500000002000], [7, 1500000003000], [1, 1500000004000]];
+      expect(aggregateBy('2s', 'avg', dp)).toEqual([
+        [10, 1500000000000], [4.5, 1500000002000], [1, 1500000004000]
+      ]);
+    });
+  });
+
+  describe('When apply aggregateBy() functions', () => {
+    it('should return series average', () => {
+      let aggregateBy = dataProcessor.metricFunctions['aggregateBy'];
+      const avg1s = aggregateBy('1s', 'avg', ctx.datapoints);
+      expect(avg1s).toEqual([
+        [9.5, 1500000000000], [2.5, 1500000001000], [5.5, 1500000002000], [4.5, 1500000003000]
+      ]);
+
+      const avg10s = aggregateBy('10s', 'avg', ctx.datapoints);
+      expect(avg10s).toEqual([
+        [5.5, 1500000000000]
+      ]);
+    });
+  });
+});
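
The aggregateBy expectations above can also be derived by hand: after flattening and sorting, each 1s frame holds the two values that share a timestamp, so the averages are (10 + 9) / 2 = 9.5, (2 + 3) / 2 = 2.5, (7 + 4) / 2 = 5.5 and (1 + 8) / 2 = 4.5; with a 10s interval all eight values land in one frame, giving (10 + 2 + 7 + 1 + 9 + 3 + 4 + 8) / 8 = 44 / 8 = 5.5. Without the new sortByTime call the flattened points are grouped in series order rather than time order, so the frames would not line up with these per-timestamp buckets.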

src/datasource-zabbix/timeseries.js | 3
@@ -488,7 +488,8 @@ const exportedFunctions = {
   MIN,
   MAX,
   MEDIAN,
-  PERCENTIL
+  PERCENTIL,
+  sortByTime
 };

 export default exportedFunctions;