Build plugin with grafana toolkit (#1539)
* Use grafana toolkit template for building plugin
* Fix linter and type errors
* Update styles building
* Fix Sass deprecation warning
* Remove empty JS files produced by webpack when building Sass
* Fix signing script
* Replace classnames with cx
* Fix data source config page
* Use custom webpack config instead of overriding the original one (see the sketch after this list)
* Use gpx_ prefix for plugin executable
* Remove unused configs
* Roll back usage of React hooks dependencies
* Move plugin-specific ts config to root config file
* Temporarily do not use rst2html for function description tooltip
* Remove unused code
* Remove unused dependencies
* Update react-table dependency
* Migrate tests to TypeScript
* Remove unused dependencies
* Remove old webpack configs
* Add sign target to Makefile
* Add magefile
* Update CI test job
* Update Go packages
* Update build instructions
* Downgrade Go version to 1.18
* Fix Go version in CI
* Fix metric picker
* Add comment to webpack config
* Remove Angular mocks
* Update bra config
* Rename datasource-zabbix to datasource (fix mage build)
* Add instructions for building backend with mage
* Fix webpack targets
* Fix CI backend tests
* Add initial e2e tests
* Fix e2e CI tests
* Update docker-compose for Cypress tests
* Build Grafana Docker image
* Fix docker stop task
* CI: add Grafana compatibility check
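On the custom webpack config item: grafana-toolkit lets a plugin extend the toolkit's base webpack config by exporting a getWebpackConfig function from a webpack.config.js in the plugin root, instead of replacing the toolkit config wholesale. A minimal sketch of that hook follows; the CopyWebpackPlugin usage is an illustrative assumption, not the actual config from this commit:

// webpack.config.js — minimal sketch of the grafana-toolkit extension hook.
// The CopyWebpackPlugin addition is illustrative, not this commit's actual config.
const CopyWebpackPlugin = require('copy-webpack-plugin');

module.exports.getWebpackConfig = (baseConfig, options) => ({
  // Spreading baseConfig keeps everything the toolkit sets up;
  // only plugin-specific bits are appended.
  ...baseConfig,
  plugins: [
    ...baseConfig.plugins,
    new CopyWebpackPlugin({ patterns: [{ from: 'src/partials', to: 'partials' }] }),
  ],
});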
src/datasource/specs/datasource.spec.ts (new file, 312 lines)
@@ -0,0 +1,312 @@
import _ from 'lodash';
import { templateSrvMock, datasourceSrvMock } from '../../test-setup/mocks';
import { replaceTemplateVars, ZabbixDatasource, zabbixTemplateFormat } from '../datasource';
import { dateMath } from '@grafana/data';

jest.mock(
  '@grafana/runtime',
  () => ({
    getBackendSrv: () => ({
      datasourceRequest: jest.fn().mockResolvedValue({ data: { result: '' } }),
      fetch: () => ({
        toPromise: () => jest.fn().mockResolvedValue({ data: { result: '' } }),
      }),
    }),
    getTemplateSrv: () => ({
      replace: jest.fn().mockImplementation((query) => query),
    }),
  }),
  { virtual: true }
);

jest.mock('../components/AnnotationQueryEditor', () => ({
  AnnotationQueryEditor: () => {},
}));

describe('ZabbixDatasource', () => {
  let ctx: any = {};

  beforeEach(() => {
    ctx.instanceSettings = {
      jsonData: {
        alerting: false,
        username: 'zabbix',
        password: 'zabbix',
        trends: true,
        trendsFrom: '14d',
        trendsRange: '7d',
        dbConnectionEnable: false,
      },
    };

    ctx.options = {
      targets: [
        {
          group: { filter: '' },
          host: { filter: '' },
          application: { filter: '' },
          item: { filter: '' },
        },
      ],
      range: {
        from: dateMath.parse('now-1h'),
        to: dateMath.parse('now'),
      },
    };

    ctx.datasourceSrv = datasourceSrvMock;

    ctx.ds = new ZabbixDatasource(ctx.instanceSettings);
    ctx.ds.templateSrv = templateSrvMock;
  });

  describe('When querying text data', () => {
    beforeEach(() => {
      ctx.ds.replaceTemplateVars = (str) => str;
      ctx.ds.zabbix.zabbixAPI.getHistory = jest.fn().mockReturnValue(
        Promise.resolve([
          { clock: '1500010200', itemid: '10100', ns: '900111000', value: 'Linux first' },
          { clock: '1500010300', itemid: '10100', ns: '900111000', value: 'Linux 2nd' },
          { clock: '1500010400', itemid: '10100', ns: '900111000', value: 'Linux last' },
        ])
      );

      ctx.ds.zabbix.getItemsFromTarget = jest.fn().mockReturnValue(
        Promise.resolve([
          {
            hosts: [{ hostid: '10001', name: 'Zabbix server' }],
            itemid: '10100',
            name: 'System information',
            key_: 'system.uname',
          },
        ])
      );

      ctx.options.targets = [
        {
          group: { filter: '' },
          host: { filter: 'Zabbix server' },
          application: { filter: '' },
          item: { filter: 'System information' },
          textFilter: '',
          useCaptureGroups: true,
          queryType: 2,
          resultFormat: 'table',
          options: {
            skipEmptyValues: false,
          },
        },
      ];
    });

    it('should return data in table format', (done) => {
      ctx.ds.query(ctx.options).then((result) => {
        expect(result.data.length).toBe(1);

        let tableData = result.data[0];
        expect(tableData.columns).toEqual([
          { text: 'Host' },
          { text: 'Item' },
          { text: 'Key' },
          { text: 'Last value' },
        ]);
        expect(tableData.rows).toEqual([['Zabbix server', 'System information', 'system.uname', 'Linux last']]);
        done();
      });
    });

    it('should extract value if regex with capture group is used', (done) => {
      ctx.options.targets[0].textFilter = 'Linux (.*)';
      ctx.ds.query(ctx.options).then((result) => {
        let tableData = result.data[0];
        expect(tableData.rows[0][3]).toEqual('last');
        done();
      });
    });

    it('should skip item when last value is empty', () => {
      ctx.ds.zabbix.getItemsFromTarget = jest.fn().mockReturnValue(
        Promise.resolve([
          {
            hosts: [{ hostid: '10001', name: 'Zabbix server' }],
            itemid: '10100',
            name: 'System information',
            key_: 'system.uname',
          },
          {
            hosts: [{ hostid: '10002', name: 'Server02' }],
            itemid: '90109',
            name: 'System information',
            key_: 'system.uname',
          },
        ])
      );

      ctx.options.targets[0].options.skipEmptyValues = true;
      ctx.ds.zabbix.getHistory = jest.fn().mockReturnValue(
        Promise.resolve([
          { clock: '1500010200', itemid: '10100', ns: '900111000', value: 'Linux first' },
          { clock: '1500010300', itemid: '10100', ns: '900111000', value: 'Linux 2nd' },
          { clock: '1500010400', itemid: '10100', ns: '900111000', value: 'Linux last' },
          { clock: '1500010200', itemid: '90109', ns: '900111000', value: 'Non empty value' },
          { clock: '1500010500', itemid: '90109', ns: '900111000', value: '' },
        ])
      );
      return ctx.ds.query(ctx.options).then((result) => {
        let tableData = result.data[0];
        expect(tableData.rows.length).toBe(1);
        expect(tableData.rows[0][3]).toEqual('Linux last');
      });
    });
  });

  describe('When replacing template variables', () => {
    function testReplacingVariable(target, varValue, expectedResult, done) {
      ctx.ds.replaceTemplateVars = _.partial(replaceTemplateVars, {
        replace: jest.fn((target) => zabbixTemplateFormat(varValue)),
      });

      let result = ctx.ds.replaceTemplateVars(target);
      expect(result).toBe(expectedResult);
      done();
    }

    /*
     * Alphanumerics, spaces, dots, dashes and underscores
     * are allowed in Zabbix host name.
     * 'AaBbCc0123 .-_'
     */
    it('should return properly escaped regex', (done) => {
      let target = '$host';
      let template_var_value = 'AaBbCc0123 .-_';
      let expected_result = '/^AaBbCc0123 \\.-_$/';

      testReplacingVariable(target, template_var_value, expected_result, done);
    });

    /*
     * Single-value variable
     * $host = backend01
     * $host => /^backend01$/
     */
    it('should return proper regex for single value', (done) => {
      let target = '$host';
      let template_var_value = 'backend01';
      let expected_result = '/^backend01$/';

      testReplacingVariable(target, template_var_value, expected_result, done);
    });

    /*
     * Multi-value variable
     * $host = [backend01, backend02]
     * $host => /^(backend01|backend02)$/
     */
    it('should return proper regex for multi-value', (done) => {
      let target = '$host';
      let template_var_value = ['backend01', 'backend02'];
      let expected_result = '/^(backend01|backend02)$/';

      testReplacingVariable(target, template_var_value, expected_result, done);
    });
  });

  describe('When invoking metricFindQuery() with legacy query', () => {
    beforeEach(() => {
      ctx.ds.replaceTemplateVars = (str) => str;
      ctx.ds.zabbix = {
        getGroups: jest.fn().mockReturnValue(Promise.resolve([])),
        getHosts: jest.fn().mockReturnValue(Promise.resolve([])),
        getApps: jest.fn().mockReturnValue(Promise.resolve([])),
        getItems: jest.fn().mockReturnValue(Promise.resolve([])),
      };
    });

    it('should return groups', (done) => {
      const tests = [
        { query: '*', expect: '/.*/' },
        { query: 'Backend', expect: 'Backend' },
        { query: 'Back*', expect: 'Back*' },
      ];

      for (const test of tests) {
        ctx.ds.metricFindQuery(test.query);
        expect(ctx.ds.zabbix.getGroups).toBeCalledWith(test.expect);
        ctx.ds.zabbix.getGroups.mockClear();
      }
      done();
    });

    it('should return empty list for empty query', (done) => {
      ctx.ds.metricFindQuery('').then((result) => {
        expect(ctx.ds.zabbix.getGroups).toBeCalledTimes(0);
        ctx.ds.zabbix.getGroups.mockClear();

        expect(result).toEqual([]);
        done();
      });
    });

    it('should return hosts', (done) => {
      const tests = [
        { query: '*.*', expect: ['/.*/', '/.*/'] },
        { query: '.', expect: ['', ''] },
        { query: 'Backend.*', expect: ['Backend', '/.*/'] },
        { query: 'Back*.', expect: ['Back*', ''] },
      ];

      for (const test of tests) {
        ctx.ds.metricFindQuery(test.query);
        expect(ctx.ds.zabbix.getHosts).toBeCalledWith(test.expect[0], test.expect[1]);
        ctx.ds.zabbix.getHosts.mockClear();
      }
      done();
    });

    it('should return applications', (done) => {
      const tests = [
        { query: '*.*.*', expect: ['/.*/', '/.*/', '/.*/'] },
        { query: '.*.', expect: ['', '/.*/', ''] },
        { query: 'Backend.backend01.*', expect: ['Backend', 'backend01', '/.*/'] },
        { query: 'Back*.*.', expect: ['Back*', '/.*/', ''] },
      ];

      for (const test of tests) {
        ctx.ds.metricFindQuery(test.query);
        expect(ctx.ds.zabbix.getApps).toBeCalledWith(test.expect[0], test.expect[1], test.expect[2]);
        ctx.ds.zabbix.getApps.mockClear();
      }
      done();
    });

    it('should return items', (done) => {
      const tests = [
        { query: '*.*.*.*', expect: ['/.*/', '/.*/', '', null, '/.*/'] },
        { query: '.*.*.*', expect: ['', '/.*/', '', null, '/.*/'] },
        { query: 'Backend.backend01.*.*', expect: ['Backend', 'backend01', '', null, '/.*/'] },
        { query: 'Back*.*.cpu.*', expect: ['Back*', '/.*/', 'cpu', null, '/.*/'] },
      ];

      for (const test of tests) {
        ctx.ds.metricFindQuery(test.query);
        expect(ctx.ds.zabbix.getItems).toBeCalledWith(
          test.expect[0],
          test.expect[1],
          test.expect[2],
          test.expect[3],
          test.expect[4]
        );
        ctx.ds.zabbix.getItems.mockClear();
      }
      done();
    });

    it('should invoke method with proper arguments', (done) => {
      let query = '*.*';

      ctx.ds.metricFindQuery(query);
      expect(ctx.ds.zabbix.getHosts).toBeCalledWith('/.*/', '/.*/');
      done();
    });
  });
});
src/datasource/specs/dbConnector.test.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { DBConnector } from '../zabbix/connectors/dbConnector';

const loadDatasourceMock = jest.fn().mockResolvedValue({ id: 42, name: 'foo', meta: {} });
const getAllMock = jest.fn().mockReturnValue([{ id: 42, name: 'foo', meta: {} }]);

jest.mock('@grafana/runtime', () => ({
  getDataSourceSrv: () => ({
    get: loadDatasourceMock,
    getList: getAllMock
  }),
}));

describe('DBConnector', () => {
  const ctx: any = {};

  describe('When init DB connector', () => {
    beforeEach(() => {
      ctx.options = {
        datasourceId: 42,
        datasourceName: undefined
      };

      loadDatasourceMock.mockClear();
      getAllMock.mockClear();
    });

    it('should try to load datasource by name first', () => {
      const dbConnector = new DBConnector({ datasourceName: 'bar' });
      dbConnector.loadDBDataSource();
      expect(getAllMock).not.toHaveBeenCalled();
      expect(loadDatasourceMock).toHaveBeenCalledWith('bar');
    });

    it('should load datasource by id if name not present', () => {
      const dbConnector = new DBConnector({ datasourceId: 42 });
      dbConnector.loadDBDataSource();
      expect(getAllMock).toHaveBeenCalled();
      expect(loadDatasourceMock).toHaveBeenCalledWith('foo');
    });

    it('should throw error if no name and id specified', () => {
      ctx.options = {};
      const dbConnector = new DBConnector(ctx.options);
      return expect(dbConnector.loadDBDataSource()).rejects.toBe('Data Source name should be specified');
    });

    it('should throw error if datasource with given id is not found', () => {
      ctx.options.datasourceId = 45;
      const dbConnector = new DBConnector(ctx.options);
      return expect(dbConnector.loadDBDataSource()).rejects.toBe('Data Source with ID 45 not found');
    });
  });
});
src/datasource/specs/influxdbConnector.test.ts (new file, 139 lines)
@@ -0,0 +1,139 @@
import { InfluxDBConnector } from '../zabbix/connectors/influxdb/influxdbConnector';
import { compactQuery } from '../utils';

jest.mock('@grafana/runtime', () => ({
  getDataSourceSrv: jest.fn(() => ({
    get: jest.fn().mockResolvedValue({ id: 42, name: 'InfluxDB DS', meta: {} }),
  })),
}));

describe('InfluxDBConnector', () => {
  let ctx: any = {};

  beforeEach(() => {
    ctx.options = { datasourceName: 'InfluxDB DS', retentionPolicy: 'longterm' };
    ctx.influxDBConnector = new InfluxDBConnector(ctx.options);
    ctx.influxDBConnector.invokeInfluxDBQuery = jest.fn().mockResolvedValue([]);
    ctx.defaultQueryParams = {
      itemids: ['123', '234'],
      range: { timeFrom: 15000, timeTill: 15100 },
      intervalSec: 5,
      table: 'history',
      aggFunction: 'MAX',
    };
  });

  describe('When building InfluxDB query', () => {
    it('should build proper query', () => {
      const { itemids, range, intervalSec, table, aggFunction } = ctx.defaultQueryParams;
      const query = ctx.influxDBConnector.buildHistoryQuery(itemids, table, range, intervalSec, aggFunction);
      const expected = compactQuery(`SELECT MAX("value")
        FROM "history"
        WHERE ("itemid" = '123' OR "itemid" = '234')
        AND "time" >= 15000s
        AND "time" <= 15100s
        GROUP BY time(5s), "itemid" fill(none)
      `);
      expect(query).toBe(expected);
    });

    it('should use MEAN instead of AVG', () => {
      const { itemids, range, intervalSec, table } = ctx.defaultQueryParams;
      const aggFunction = 'avg';
      const query = ctx.influxDBConnector.buildHistoryQuery(itemids, table, range, intervalSec, aggFunction);
      const expected = compactQuery(`SELECT MEAN("value")
        FROM "history"
        WHERE ("itemid" = '123' OR "itemid" = '234')
        AND "time" >= 15000s
        AND "time" <= 15100s
        GROUP BY time(5s), "itemid" fill(none)
      `);
      expect(query).toBe(expected);
    });
  });

  describe('When invoking InfluxDB query', () => {
    it('should query proper table depending on item type', () => {
      const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
      const options = { intervalMs: 5000 };
      const items = [{ itemid: '123', value_type: 3 }];
      const expectedQuery = compactQuery(`SELECT MEAN("value")
        FROM "history_uint"
        WHERE ("itemid" = '123')
        AND "time" >= 15000s
        AND "time" <= 15100s
        GROUP BY time(5s), "itemid" fill(none)
      `);
      ctx.influxDBConnector.getHistory(items, timeFrom, timeTill, options);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
    });

    it('should split query if different item types are used', () => {
      const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
      const options = { intervalMs: 5000 };
      const items = [
        { itemid: '123', value_type: 0 },
        { itemid: '234', value_type: 3 },
      ];
      const sharedQueryPart = `AND "time" >= 15000s AND "time" <= 15100s GROUP BY time(5s), "itemid" fill(none)`;
      const expectedQueryFirst = compactQuery(`SELECT MEAN("value")
        FROM "history"
        WHERE ("itemid" = '123') ${sharedQueryPart}
      `);
      const expectedQuerySecond = compactQuery(`SELECT MEAN("value")
        FROM "history_uint"
        WHERE ("itemid" = '234') ${sharedQueryPart}
      `);
      ctx.influxDBConnector.getHistory(items, timeFrom, timeTill, options);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledTimes(2);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenNthCalledWith(1, expectedQueryFirst);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenNthCalledWith(2, expectedQuerySecond);
    });

    it('should use the same table for trends query if no retention policy set', () => {
      ctx.influxDBConnector.retentionPolicy = '';
      const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
      const options = { intervalMs: 5000 };
      const items = [{ itemid: '123', value_type: 3 }];
      const expectedQuery = compactQuery(`SELECT MEAN("value")
        FROM "history_uint"
        WHERE ("itemid" = '123')
        AND "time" >= 15000s
        AND "time" <= 15100s
        GROUP BY time(5s), "itemid" fill(none)
      `);
      ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
    });

    it('should use retention policy name for trends query if it was set', () => {
      const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
      const options = { intervalMs: 5000 };
      const items = [{ itemid: '123', value_type: 3 }];
      const expectedQuery = compactQuery(`SELECT MEAN("value_avg")
        FROM "longterm"."history_uint"
        WHERE ("itemid" = '123')
        AND "time" >= 15000s
        AND "time" <= 15100s
        GROUP BY time(5s), "itemid" fill(none)
      `);
      ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
    });

    it('should use proper value column if retention policy set (trends used)', () => {
      const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
      const options = { intervalMs: 5000, consolidateBy: 'max' };
      const items = [{ itemid: '123', value_type: 3 }];
      const expectedQuery = compactQuery(`SELECT MAX("value_max")
        FROM "longterm"."history_uint"
        WHERE ("itemid" = '123')
        AND "time" >= 15000s
        AND "time" <= 15100s
        GROUP BY time(5s), "itemid" fill(none)
      `);
      ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
      expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
    });
  });
});
src/datasource/specs/migrations.test.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
import _ from 'lodash';
import { migrateDSConfig, DS_CONFIG_SCHEMA } from '../migrations';

describe('Migrations', () => {
  let ctx: any = {};

  describe('When migrating datasource config', () => {
    beforeEach(() => {
      ctx.jsonData = {
        dbConnection: {
          enable: true,
          datasourceId: 1,
        },
      };
    });

    it('should change direct DB connection setting to flat style', () => {
      migrateDSConfig(ctx.jsonData);
      expect(ctx.jsonData).toMatchObject({
        dbConnectionEnable: true,
        dbConnectionDatasourceId: 1,
        schema: DS_CONFIG_SCHEMA,
      });
    });

    it('should not touch anything if schema is up to date', () => {
      ctx.jsonData = {
        futureOptionOne: 'foo',
        futureOptionTwo: 'bar',
        schema: DS_CONFIG_SCHEMA,
      };
      migrateDSConfig(ctx.jsonData);
      expect(ctx.jsonData).toMatchObject({
        futureOptionOne: 'foo',
        futureOptionTwo: 'bar',
        schema: DS_CONFIG_SCHEMA,
      });
      expect(ctx.jsonData.dbConnectionEnable).toBeUndefined();
      expect(ctx.jsonData.dbConnectionDatasourceId).toBeUndefined();
    });
  });

  describe('When handling provisioned datasource config', () => {
    beforeEach(() => {
      ctx.jsonData = {
        username: 'zabbix',
        password: 'zabbix',
        trends: true,
        trendsFrom: '7d',
        trendsRange: '4d',
        cacheTTL: '1h',
        alerting: true,
        addThresholds: false,
        alertingMinSeverity: 3,
        disableReadOnlyUsersAck: true,
        dbConnectionEnable: true,
        dbConnectionDatasourceName: 'MySQL Zabbix',
        dbConnectionRetentionPolicy: 'one_year',
      };
    });

    it('should not touch anything if schema is up to date', () => {
      const originalConf = _.cloneDeep(ctx.jsonData);
      migrateDSConfig(ctx.jsonData);
      expect(ctx.jsonData).toMatchObject(originalConf);
      expect(ctx.jsonData.dbConnectionEnable).toBe(true);
      expect(ctx.jsonData.dbConnectionDatasourceName).toBeDefined();
    });
  });
});
src/datasource/specs/timeseries.spec.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
// import _ from 'lodash';
import ts from '../timeseries';

describe('timeseries processing functions', () => {

  describe('sumSeries()', () => {
    it('should properly sum series', (done) => {
      let series = [
        [[0, 1], [1, 2], [1, 3]],
        [[2, 1], [3, 2], [4, 3]]
      ];

      let expected = [[2, 1], [4, 2], [5, 3]];

      let result = ts.sumSeries(series);
      expect(result).toEqual(expected);
      done();
    });

    it('should properly sum series with nulls', (done) => {
      // issue #286
      let series = [
        [[1, 1], [1, 2], [1, 3]],
        [[3, 2], [4, 3]]
      ];

      let expected = [[1, 1], [4, 2], [5, 3]];

      let result = ts.sumSeries(series);
      expect(result).toEqual(expected);
      done();
    });

    it('should properly offset metric', (done) => {
      let points = [[1, 1], [-4, 2], [2, 3]];

      let expected = [[101, 1], [96, 2], [102, 3]];

      let result = ts.offset(points, 100);
      expect(result).toEqual(expected);
      done();
    });
  });
});
src/datasource/specs/utils.spec.ts (new file, 168 lines)
@@ -0,0 +1,168 @@
import _ from 'lodash';
import * as utils from '../utils';

describe('Utils', () => {

  describe('expandItemName()', () => {

    it('should properly expand unquoted params', (done) => {
      let test_cases = [
        {
          name: `CPU $2 time`,
          key: `system.cpu.util[,user,avg1]`,
          expected: "CPU user time"
        },
        {
          name: `CPU $2 time - $3`,
          key: `system.cpu.util[,system,avg1]`,
          expected: "CPU system time - avg1"
        },
        {
          name: `CPU - $1 - $2 - $3`,
          key: `system.cpu.util[,system,avg1]`,
          expected: "CPU - - system - avg1"
        }
      ];

      _.each(test_cases, test_case => {
        let expandedName = utils.expandItemName(test_case.name, test_case.key);
        expect(expandedName).toBe(test_case.expected);
      });
      done();
    });

    it('should properly expand quoted params with commas', (done) => {
      let test_cases = [
        {
          name: `CPU $2 time`,
          key: `system.cpu.util["type=user,value=avg",user]`,
          expected: "CPU user time"
        },
        {
          name: `CPU $1 time`,
          key: `system.cpu.util["type=user,value=avg","user"]`,
          expected: "CPU type=user,value=avg time"
        },
        {
          name: `CPU $1 time $3`,
          key: `system.cpu.util["type=user,value=avg",,"user"]`,
          expected: "CPU type=user,value=avg time user"
        },
        {
          name: `CPU $1 $2 $3`,
          key: `system.cpu.util["type=user,value=avg",time,"user"]`,
          expected: "CPU type=user,value=avg time user"
        }
      ];

      _.each(test_cases, test_case => {
        let expandedName = utils.expandItemName(test_case.name, test_case.key);
        expect(expandedName).toBe(test_case.expected);
      });
      done();
    });

    it('should properly expand array params', (done) => {
      let test_cases = [
        {
          name: `CPU $2 - $3 time`,
          key: `system.cpu.util[,[user,system],avg1]`,
          expected: "CPU user,system - avg1 time"
        },
        {
          name: `CPU $2 - $3 time`,
          key: `system.cpu.util[,["user,system",iowait],avg1]`,
          expected: `CPU "user,system",iowait - avg1 time`
        },
        {
          name: `CPU - $2 - $3 - $4`,
          key: `system.cpu.util[,[],["user,system",iowait],avg1]`,
          expected: `CPU - - "user,system",iowait - avg1`
        }
      ];

      _.each(test_cases, test_case => {
        let expandedName = utils.expandItemName(test_case.name, test_case.key);
        expect(expandedName).toBe(test_case.expected);
      });
      done();
    });
  });

  describe('splitTemplateQuery()', () => {

    // Backward compatibility
    it('should properly split query in old format', (done) => {
      let test_cases = [
        {
          query: `/alu/./tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9{2}/`,
          expected: ['/alu/', '/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9{2}/']
        },
        {
          query: `a.b.c.d`,
          expected: ['a', 'b', 'c', 'd']
        }
      ];

      _.each(test_cases, test_case => {
        let splitQuery = utils.splitTemplateQuery(test_case.query);
        expect(splitQuery).toEqual(test_case.expected);
      });
      done();
    });

    it('should properly split query', (done) => {
      let test_cases = [
        {
          query: `{alu}{/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]*/}`,
          expected: ['alu', '/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]*/']
        },
        {
          query: `{alu}{/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]{2}/}`,
          expected: ['alu', '/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]{2}/']
        },
        {
          query: `{a}{b}{c}{d}`,
          expected: ['a', 'b', 'c', 'd']
        },
        {
          query: `{a}{b.c.d}`,
          expected: ['a', 'b.c.d']
        }
      ];

      _.each(test_cases, test_case => {
        let splitQuery = utils.splitTemplateQuery(test_case.query);
        expect(splitQuery).toEqual(test_case.expected);
      });
      done();
    });
  });

  describe('getArrayDepth()', () => {
    it('should calculate proper array depth', () => {
      const test_cases = [
        {
          array: [],
          depth: 1
        },
        {
          array: [1, 2, 3],
          depth: 1
        },
        {
          array: [[1, 2], [3, 4]],
          depth: 2
        },
        {
          array: [[[1, 2], [3, 4]], [[1, 2], [3, 4]]],
          depth: 3
        },
      ];

      for (const test_case of test_cases) {
        expect(utils.getArrayDepth(test_case.array)).toBe(test_case.depth);
      }
    });
  });
});