Build plugin with grafana toolkit (#1539)
* Use grafana toolkit template for building plugin
* Fix linter and type errors
* Update styles building
* Fix sass deprecation warning
* Remove empty js files produced by webpack building sass
* Fix signing script
* Replace classnames with cx
* Fix data source config page
* Use custom webpack config instead of overriding original one
* Use gpx_ prefix for plugin executable
* Remove unused configs
* Roll back react hooks dependencies usage
* Move plugin-specific ts config to root config file
* Temporarily do not use rst2html for function description tooltip
* Remove unused code
* Remove unused dependencies
* Update react table dependency
* Migrate tests to typescript
* Remove unused dependencies
* Remove old webpack configs
* Add sign target to makefile
* Add magefile
* Update CI test job
* Update go packages
* Update build instructions
* Downgrade go version to 1.18
* Fix go version in ci
* Fix metric picker
* Add comment to webpack config
* Remove angular mocks
* Update bra config
* Rename datasource-zabbix to datasource (fix mage build)
* Add instructions for building backend with mage
* Fix webpack targets
* Fix ci backend tests
* Add initial e2e tests
* Fix e2e ci tests
* Update docker compose for cypress tests
* Build grafana docker image
* Fix docker stop task
* CI: add Grafana compatibility check
src/datasource/zabbix/connectors/sql/sqlConnector.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
import _ from 'lodash';
import { getBackendSrv } from '@grafana/runtime';
import { compactQuery } from '../../../utils';
import mysql from './mysql';
import postgres from './postgres';
import dbConnector, {
  DBConnector,
  DEFAULT_QUERY_LIMIT,
  HISTORY_TO_TABLE_MAP,
  TREND_TO_TABLE_MAP,
} from '../dbConnector';

const supportedDatabases = {
  mysql: 'mysql',
  postgres: 'postgres',
};
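
// Executes history and trend queries directly against the Zabbix database through a
// Grafana SQL data source, using a dialect-specific query builder (mysql or postgres).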
export class SQLConnector extends DBConnector {
  private limit: number;
  private sqlDialect: any;

  constructor(options) {
    super(options);

    this.limit = options.limit || DEFAULT_QUERY_LIMIT;
    this.sqlDialect = null;

    super.loadDBDataSource().then(() => {
      this.loadSQLDialect();
    });
  }

  loadSQLDialect() {
    if (this.datasourceTypeId === supportedDatabases.postgres) {
      this.sqlDialect = postgres;
    } else {
      this.sqlDialect = mysql;
    }
  }
  /**
   * Try to invoke test query for one of Zabbix database tables.
   */
  testDataSource() {
    const testQuery = this.sqlDialect.testQuery();
    return this.invokeSQLQuery(testQuery);
  }
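
  // Zabbix stores history in per-value-type tables (history, history_uint,
  // history_str, ...), so items are grouped by value_type and one query is
  // issued per table.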
  getHistory(items, timeFrom, timeTill, options) {
    const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);

    // Group items by value type and perform request for each value type
    const grouped_items = _.groupBy(items, 'value_type');
    const promises = _.map(grouped_items, (items, value_type) => {
      const itemids = _.map(items, 'itemid').join(', ');
      const table = HISTORY_TO_TABLE_MAP[value_type];
      let query = this.sqlDialect.historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction);

      query = compactQuery(query);
      return this.invokeSQLQuery(query);
    });

    return Promise.all(promises).then((results) => {
      return _.flatten(results);
    });
  }
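
  // Trends are Zabbix's pre-aggregated history (min/avg/max per item), so besides
  // the aggregation function, consolidateBy also picks which trend value column to read.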
  getTrends(items, timeFrom, timeTill, options) {
    const { consolidateBy } = options;
    const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);

    // Group items by value type and perform request for each value type
    const grouped_items = _.groupBy(items, 'value_type');
    const promises = _.map(grouped_items, (items, value_type) => {
      const itemids = _.map(items, 'itemid').join(', ');
      const table = TREND_TO_TABLE_MAP[value_type];
      let valueColumn = _.includes(['avg', 'min', 'max', 'sum'], consolidateBy) ? consolidateBy : 'avg';
      valueColumn = dbConnector.consolidateByTrendColumns[valueColumn];
      let query = this.sqlDialect.trendsQuery(
        itemids,
        table,
        timeFrom,
        timeTill,
        intervalSec,
        aggFunction,
        valueColumn
      );

      query = compactQuery(query);
      return this.invokeSQLQuery(query);
    });

    return Promise.all(promises).then((results) => {
      return _.flatten(results);
    });
  }
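
  // Runs a raw SQL query through Grafana's backend query endpoint (/api/ds/query),
  // targeting the linked SQL data source by id.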
  invokeSQLQuery(query) {
    const queryDef = {
      refId: 'A',
      format: 'time_series',
      datasourceId: this.datasourceId,
      rawSql: query,
      maxDataPoints: this.limit,
    };

    return getBackendSrv()
      .datasourceRequest({
        url: '/api/ds/query',
        method: 'POST',
        data: {
          queries: [queryDef],
        },
      })
      .then((response) => {
        const results = response.data.results;
        if (results['A']) {
          return results['A'].frames;
        } else {
          return null;
        }
      });
  }
}

function getAggFunc(timeFrom, timeTill, options) {
  const { intervalMs } = options;
  let { consolidateBy } = options;
  let intervalSec = Math.ceil(intervalMs / 1000);

  // The interval must match the time range exactly n times, otherwise
  // the resulting first and last data points will yield invalid values in the
  // calculated average value in downsampleSeries - when using consolidateBy(avg)
  const numOfIntervals = Math.ceil((timeTill - timeFrom) / intervalSec);
  intervalSec = Math.ceil((timeTill - timeFrom) / numOfIntervals);

  consolidateBy = consolidateBy || 'avg';
  const aggFunction = dbConnector.consolidateByFunc[consolidateBy];
  return { aggFunction, intervalSec };
}
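
A quick worked example of the interval snapping in getAggFunc, as a sketch with made-up numbers (a 100-second range and a 30-second requested interval, neither taken from the commit):

// Hypothetical inputs: timeFrom = 0, timeTill = 100, intervalMs = 30000.
let intervalSec = Math.ceil(30000 / 1000); // 30

// 30 does not divide the 100 s range evenly, so snap it to the range:
const numOfIntervals = Math.ceil((100 - 0) / intervalSec); // ceil(100 / 30) = 4
intervalSec = Math.ceil((100 - 0) / numOfIntervals);       // ceil(100 / 4) = 25

// Four 25 s buckets cover the range exactly, so the first and last buckets are
// full-width and consolidateBy(avg) is not skewed by partial intervals.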