Build plugin with grafana toolkit (#1539)
* Use grafana toolkit template for building plugin * Fix linter and type errors * Update styles building * Fix sass deprecation warning * Remove empty js files produced by webpack building sass * Fix signing script * Replace classnames with cx * Fix data source config page * Use custom webpack config instead of overriding original one * Use gpx_ prefix for plugin executable * Remove unused configs * Roll back react hooks dependencies usage * Move plugin-specific ts config to root config file * Temporary do not use rst2html for function description tooltip * Remove unused code * remove unused dependencies * update react table dependency * Migrate tests to typescript * remove unused dependencies * Remove old webpack configs * Add sign target to makefile * Add magefile * Update CI test job * Update go packages * Update build instructions * Downgrade go version to 1.18 * Fix go version in ci * Fix metric picker * Add comment to webpack config * remove angular mocks * update bra config * Rename datasource-zabbix to datasource (fix mage build) * Add instructions for building backend with mage * Fix webpack targets * Fix ci backend tests * Add initial e2e tests * Fix e2e ci tests * Update docker compose for cypress tests * build grafana docker image * Fix docker stop task * CI: add Grafana compatibility check
This commit is contained in:
88
src/datasource/zabbix/connectors/dbConnector.ts
Normal file
88
src/datasource/zabbix/connectors/dbConnector.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import _ from 'lodash';
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
|
||||
export const DEFAULT_QUERY_LIMIT = 10000;
|
||||
|
||||
export const HISTORY_TO_TABLE_MAP = {
|
||||
'0': 'history',
|
||||
'1': 'history_str',
|
||||
'2': 'history_log',
|
||||
'3': 'history_uint',
|
||||
'4': 'history_text',
|
||||
};
|
||||
|
||||
export const TREND_TO_TABLE_MAP = {
|
||||
'0': 'trends',
|
||||
'3': 'trends_uint',
|
||||
};
|
||||
|
||||
export const consolidateByFunc = {
|
||||
avg: 'AVG',
|
||||
min: 'MIN',
|
||||
max: 'MAX',
|
||||
sum: 'SUM',
|
||||
count: 'COUNT',
|
||||
};
|
||||
|
||||
export const consolidateByTrendColumns = {
|
||||
avg: 'value_avg',
|
||||
min: 'value_min',
|
||||
max: 'value_max',
|
||||
sum: 'num*value_avg', // sum of sums inside the one-hour trend period
|
||||
};
|
||||
|
||||
/**
|
||||
* Base class for external history database connectors. Subclasses should implement `getHistory()`, `getTrends()` and
|
||||
* `testDataSource()` methods, which describe how to fetch data from source other than Zabbix API.
|
||||
*/
|
||||
export class DBConnector {
|
||||
protected datasourceId: any;
|
||||
private datasourceName: any;
|
||||
protected datasourceTypeId: any;
|
||||
// private datasourceTypeName: any;
|
||||
|
||||
constructor(options) {
|
||||
this.datasourceId = options.datasourceId;
|
||||
this.datasourceName = options.datasourceName;
|
||||
this.datasourceTypeId = null;
|
||||
// this.datasourceTypeName = null;
|
||||
}
|
||||
|
||||
static loadDatasource(dsId, dsName) {
|
||||
if (!dsName && dsId !== undefined) {
|
||||
const ds = _.find(getDataSourceSrv().getList(), { id: dsId });
|
||||
if (!ds) {
|
||||
return Promise.reject(`Data Source with ID ${dsId} not found`);
|
||||
}
|
||||
dsName = ds.name;
|
||||
}
|
||||
if (dsName) {
|
||||
return getDataSourceSrv().get(dsName);
|
||||
} else {
|
||||
return Promise.reject(`Data Source name should be specified`);
|
||||
}
|
||||
}
|
||||
|
||||
loadDBDataSource() {
|
||||
return DBConnector.loadDatasource(this.datasourceId, this.datasourceName).then((ds) => {
|
||||
this.datasourceTypeId = ds.meta.id;
|
||||
// this.datasourceTypeName = ds.meta.name;
|
||||
if (!this.datasourceName) {
|
||||
this.datasourceName = ds.name;
|
||||
}
|
||||
if (!this.datasourceId) {
|
||||
this.datasourceId = ds.id;
|
||||
}
|
||||
return ds;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
DBConnector,
|
||||
DEFAULT_QUERY_LIMIT,
|
||||
HISTORY_TO_TABLE_MAP,
|
||||
TREND_TO_TABLE_MAP,
|
||||
consolidateByFunc,
|
||||
consolidateByTrendColumns,
|
||||
};
|
||||
165
src/datasource/zabbix/connectors/influxdb/influxdbConnector.ts
Normal file
165
src/datasource/zabbix/connectors/influxdb/influxdbConnector.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
dataFrameToJSON,
|
||||
Field,
|
||||
FieldType,
|
||||
MutableDataFrame,
|
||||
TIME_SERIES_TIME_FIELD_NAME,
|
||||
} from '@grafana/data';
|
||||
import _ from 'lodash';
|
||||
import { compactQuery } from '../../../utils';
|
||||
import { consolidateByTrendColumns, DBConnector, HISTORY_TO_TABLE_MAP } from '../dbConnector';
|
||||
|
||||
const consolidateByFunc = {
|
||||
avg: 'MEAN',
|
||||
min: 'MIN',
|
||||
max: 'MAX',
|
||||
sum: 'SUM',
|
||||
count: 'COUNT',
|
||||
};
|
||||
|
||||
export class InfluxDBConnector extends DBConnector {
|
||||
private retentionPolicy: any;
|
||||
private influxDS: any;
|
||||
|
||||
constructor(options) {
|
||||
super(options);
|
||||
this.retentionPolicy = options.retentionPolicy;
|
||||
super.loadDBDataSource().then((ds) => {
|
||||
this.influxDS = ds;
|
||||
return ds;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to invoke test query for one of Zabbix database tables.
|
||||
*/
|
||||
testDataSource() {
|
||||
return this.influxDS.testDatasource().then((result) => {
|
||||
if (result.status && result.status === 'error') {
|
||||
return Promise.reject({
|
||||
data: {
|
||||
message: `InfluxDB connection error: ${result.message}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
getHistory(items, timeFrom, timeTill, options) {
|
||||
const { intervalMs, retentionPolicy } = options;
|
||||
let { consolidateBy } = options;
|
||||
const intervalSec = Math.ceil(intervalMs / 1000);
|
||||
|
||||
const range = { timeFrom, timeTill };
|
||||
consolidateBy = consolidateBy || 'avg';
|
||||
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid');
|
||||
const table = HISTORY_TO_TABLE_MAP[value_type];
|
||||
const query = this.buildHistoryQuery(itemids, table, range, intervalSec, consolidateBy, retentionPolicy);
|
||||
return this.invokeInfluxDBQuery(query);
|
||||
});
|
||||
|
||||
return Promise.all(promises)
|
||||
.then(_.flatten)
|
||||
.then((results) => {
|
||||
return handleInfluxHistoryResponse(results);
|
||||
});
|
||||
}
|
||||
|
||||
getTrends(items, timeFrom, timeTill, options) {
|
||||
options.retentionPolicy = this.retentionPolicy;
|
||||
return this.getHistory(items, timeFrom, timeTill, options);
|
||||
}
|
||||
|
||||
buildHistoryQuery(itemids, table, range, intervalSec, aggFunction, retentionPolicy) {
|
||||
const { timeFrom, timeTill } = range;
|
||||
const measurement = retentionPolicy ? `"${retentionPolicy}"."${table}"` : `"${table}"`;
|
||||
let value = 'value';
|
||||
if (retentionPolicy) {
|
||||
value = consolidateByTrendColumns[aggFunction] || 'value_avg';
|
||||
}
|
||||
const aggregation = consolidateByFunc[aggFunction] || aggFunction;
|
||||
const where_clause = this.buildWhereClause(itemids);
|
||||
const query = `SELECT ${aggregation}("${value}")
|
||||
FROM ${measurement}
|
||||
WHERE ${where_clause}
|
||||
AND "time" >= ${timeFrom}s
|
||||
AND "time" <= ${timeTill}s
|
||||
GROUP BY time(${intervalSec}s), "itemid" fill(none)`;
|
||||
return compactQuery(query);
|
||||
}
|
||||
|
||||
buildWhereClause(itemids) {
|
||||
const itemidsWhere = itemids.map((itemid) => `"itemid" = '${itemid}'`).join(' OR ');
|
||||
return `(${itemidsWhere})`;
|
||||
}
|
||||
|
||||
async invokeInfluxDBQuery(query) {
|
||||
const data = await this.influxDS._seriesQuery(query).toPromise();
|
||||
return data?.results || [];
|
||||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function handleInfluxHistoryResponse(results) {
|
||||
if (!results) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const frames: DataFrame[] = [];
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const result = results[i];
|
||||
|
||||
if (result.error) {
|
||||
const error = `InfluxDB error: ${result.error}`;
|
||||
return Promise.reject(new Error(error));
|
||||
}
|
||||
|
||||
if (!result || !result.series) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const influxSeriesList = results[i].series;
|
||||
|
||||
for (let y = 0; y < influxSeriesList.length; y++) {
|
||||
const influxSeries = influxSeriesList[y];
|
||||
const tsBuffer = [];
|
||||
const valuesBuffer = [];
|
||||
if (influxSeries.values) {
|
||||
for (i = 0; i < influxSeries.values.length; i++) {
|
||||
tsBuffer.push(influxSeries.values[i][0]);
|
||||
valuesBuffer.push(influxSeries.values[i][1]);
|
||||
}
|
||||
}
|
||||
const timeFiled: Field<number> = {
|
||||
name: TIME_SERIES_TIME_FIELD_NAME,
|
||||
type: FieldType.time,
|
||||
config: {},
|
||||
values: new ArrayVector(tsBuffer),
|
||||
};
|
||||
|
||||
const valueFiled: Field<number | null> = {
|
||||
name: influxSeries?.tags?.itemid,
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
values: new ArrayVector(valuesBuffer),
|
||||
};
|
||||
|
||||
frames.push(
|
||||
new MutableDataFrame({
|
||||
name: influxSeries?.tags?.itemid,
|
||||
fields: [timeFiled, valueFiled],
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return frames.map((f) => dataFrameToJSON(f));
|
||||
}
|
||||
46
src/datasource/zabbix/connectors/sql/mysql.ts
Normal file
46
src/datasource/zabbix/connectors/sql/mysql.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* MySQL queries
|
||||
*/
|
||||
|
||||
function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
|
||||
const time_expression = `clock DIV ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT CAST(itemid AS CHAR) AS metric, ${time_expression} AS time_sec, ${aggFunction}(value) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY ${time_expression}, metric
|
||||
ORDER BY time_sec ASC
|
||||
`;
|
||||
}
|
||||
|
||||
function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
|
||||
const time_expression = `clock DIV ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT CAST(itemid AS CHAR) AS metric, ${time_expression} AS time_sec, ${aggFunction}(${valueColumn}) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY ${time_expression}, metric
|
||||
ORDER BY time_sec ASC
|
||||
`;
|
||||
}
|
||||
|
||||
function testQuery() {
|
||||
return `SELECT CAST(itemid AS CHAR) AS metric, clock AS time_sec, value_avg AS value
|
||||
FROM trends_uint LIMIT 1`;
|
||||
}
|
||||
|
||||
const mysql = {
|
||||
historyQuery,
|
||||
trendsQuery,
|
||||
testQuery,
|
||||
};
|
||||
|
||||
export default mysql;
|
||||
52
src/datasource/zabbix/connectors/sql/postgres.ts
Normal file
52
src/datasource/zabbix/connectors/sql/postgres.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
/**
|
||||
* Postgres queries
|
||||
*/
|
||||
|
||||
const ITEMID_FORMAT = 'FM99999999999999999999';
|
||||
|
||||
function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
|
||||
const time_expression = `clock / ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(value) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY 1, 2
|
||||
ORDER BY time ASC
|
||||
`;
|
||||
}
|
||||
|
||||
function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
|
||||
const time_expression = `clock / ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(${valueColumn}) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY 1, 2
|
||||
ORDER BY time ASC
|
||||
`;
|
||||
}
|
||||
|
||||
const TEST_QUERY = `
|
||||
SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, clock AS time, value_avg AS value
|
||||
FROM trends_uint LIMIT 1
|
||||
`;
|
||||
|
||||
function testQuery() {
|
||||
return TEST_QUERY;
|
||||
}
|
||||
|
||||
const postgres = {
|
||||
historyQuery,
|
||||
trendsQuery,
|
||||
testQuery,
|
||||
};
|
||||
|
||||
export default postgres;
|
||||
140
src/datasource/zabbix/connectors/sql/sqlConnector.ts
Normal file
140
src/datasource/zabbix/connectors/sql/sqlConnector.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import _ from 'lodash';
|
||||
import { getBackendSrv } from '@grafana/runtime';
|
||||
import { compactQuery } from '../../../utils';
|
||||
import mysql from './mysql';
|
||||
import postgres from './postgres';
|
||||
import dbConnector, {
|
||||
DBConnector,
|
||||
DEFAULT_QUERY_LIMIT,
|
||||
HISTORY_TO_TABLE_MAP,
|
||||
TREND_TO_TABLE_MAP,
|
||||
} from '../dbConnector';
|
||||
|
||||
const supportedDatabases = {
|
||||
mysql: 'mysql',
|
||||
postgres: 'postgres',
|
||||
};
|
||||
|
||||
export class SQLConnector extends DBConnector {
|
||||
private limit: number;
|
||||
private sqlDialect: any;
|
||||
|
||||
constructor(options) {
|
||||
super(options);
|
||||
|
||||
this.limit = options.limit || DEFAULT_QUERY_LIMIT;
|
||||
this.sqlDialect = null;
|
||||
|
||||
super.loadDBDataSource().then(() => {
|
||||
this.loadSQLDialect();
|
||||
});
|
||||
}
|
||||
|
||||
loadSQLDialect() {
|
||||
if (this.datasourceTypeId === supportedDatabases.postgres) {
|
||||
this.sqlDialect = postgres;
|
||||
} else {
|
||||
this.sqlDialect = mysql;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to invoke test query for one of Zabbix database tables.
|
||||
*/
|
||||
testDataSource() {
|
||||
const testQuery = this.sqlDialect.testQuery();
|
||||
return this.invokeSQLQuery(testQuery);
|
||||
}
|
||||
|
||||
getHistory(items, timeFrom, timeTill, options) {
|
||||
const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);
|
||||
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid').join(', ');
|
||||
const table = HISTORY_TO_TABLE_MAP[value_type];
|
||||
let query = this.sqlDialect.historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction);
|
||||
|
||||
query = compactQuery(query);
|
||||
return this.invokeSQLQuery(query);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then((results) => {
|
||||
return _.flatten(results);
|
||||
});
|
||||
}
|
||||
|
||||
getTrends(items, timeFrom, timeTill, options) {
|
||||
const { consolidateBy } = options;
|
||||
const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);
|
||||
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid').join(', ');
|
||||
const table = TREND_TO_TABLE_MAP[value_type];
|
||||
let valueColumn = _.includes(['avg', 'min', 'max', 'sum'], consolidateBy) ? consolidateBy : 'avg';
|
||||
valueColumn = dbConnector.consolidateByTrendColumns[valueColumn];
|
||||
let query = this.sqlDialect.trendsQuery(
|
||||
itemids,
|
||||
table,
|
||||
timeFrom,
|
||||
timeTill,
|
||||
intervalSec,
|
||||
aggFunction,
|
||||
valueColumn
|
||||
);
|
||||
|
||||
query = compactQuery(query);
|
||||
return this.invokeSQLQuery(query);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then((results) => {
|
||||
return _.flatten(results);
|
||||
});
|
||||
}
|
||||
|
||||
invokeSQLQuery(query) {
|
||||
const queryDef = {
|
||||
refId: 'A',
|
||||
format: 'time_series',
|
||||
datasourceId: this.datasourceId,
|
||||
rawSql: query,
|
||||
maxDataPoints: this.limit,
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.datasourceRequest({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
queries: [queryDef],
|
||||
},
|
||||
})
|
||||
.then((response) => {
|
||||
const results = response.data.results;
|
||||
if (results['A']) {
|
||||
return results['A'].frames;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getAggFunc(timeFrom, timeTill, options) {
|
||||
const { intervalMs } = options;
|
||||
let { consolidateBy } = options;
|
||||
let intervalSec = Math.ceil(intervalMs / 1000);
|
||||
|
||||
// The interval must match the time range exactly n times, otherwise
|
||||
// the resulting first and last data points will yield invalid values in the
|
||||
// calculated average value in downsampleSeries - when using consolidateBy(avg)
|
||||
const numOfIntervals = Math.ceil((timeTill - timeFrom) / intervalSec);
|
||||
intervalSec = Math.ceil((timeTill - timeFrom) / numOfIntervals);
|
||||
|
||||
consolidateBy = consolidateBy || 'avg';
|
||||
const aggFunction = dbConnector.consolidateByFunc[consolidateBy];
|
||||
return { aggFunction, intervalSec };
|
||||
}
|
||||
52
src/datasource/zabbix/connectors/zabbix_api/types.ts
Normal file
52
src/datasource/zabbix/connectors/zabbix_api/types.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
export interface JSONRPCRequest {
|
||||
jsonrpc: '2.0' | string;
|
||||
method: string;
|
||||
id: number;
|
||||
auth?: string | null;
|
||||
params?: JSONRPCRequestParams;
|
||||
}
|
||||
|
||||
export interface JSONRPCResponse<T> {
|
||||
jsonrpc: '2.0' | string;
|
||||
id: number;
|
||||
result?: T;
|
||||
error?: JSONRPCError;
|
||||
}
|
||||
|
||||
export interface JSONRPCError {
|
||||
code?: number;
|
||||
message?: string;
|
||||
data?: string;
|
||||
}
|
||||
|
||||
export type JSONRPCRequestParams = {[key: string]: any};
|
||||
|
||||
export type HTTPMethod = 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE' | 'HEAD' | 'CONNECT' | 'OPTIONS' | 'TRACE';
|
||||
|
||||
export type GFRequestOptions = {[key: string]: any};
|
||||
|
||||
export interface ZabbixRequestResponse {
|
||||
data?: JSONRPCResponse<any>;
|
||||
}
|
||||
|
||||
export type ZabbixAPIResponse<T> = Promise<T>;
|
||||
|
||||
export type APILoginResponse = string;
|
||||
|
||||
export interface ZBXScript {
|
||||
scriptid: string;
|
||||
name?: string;
|
||||
command?: string;
|
||||
host_access?: string;
|
||||
usrgrpid?: string;
|
||||
groupid?: string;
|
||||
description?: string;
|
||||
confirmation?: string;
|
||||
type?: string;
|
||||
execute_on?: string;
|
||||
}
|
||||
|
||||
export interface APIExecuteScriptResponse {
|
||||
response: 'success' | 'failed';
|
||||
value?: string;
|
||||
}
|
||||
@@ -0,0 +1,773 @@
|
||||
import _ from 'lodash';
|
||||
import semver from 'semver';
|
||||
import kbn from 'grafana/app/core/utils/kbn';
|
||||
import * as utils from '../../../utils';
|
||||
import { MIN_SLA_INTERVAL, ZBX_ACK_ACTION_ADD_MESSAGE, ZBX_ACK_ACTION_NONE } from '../../../constants';
|
||||
import { ShowProblemTypes, ZBXProblem } from '../../../types';
|
||||
import { APIExecuteScriptResponse, JSONRPCError, ZBXScript } from './types';
|
||||
import { BackendSrvRequest, getBackendSrv } from '@grafana/runtime';
|
||||
import { rangeUtil } from '@grafana/data';
|
||||
|
||||
const DEFAULT_ZABBIX_VERSION = '3.0.0';
|
||||
|
||||
// Backward compatibility. Since Grafana 7.2 roundInterval() func was moved to @grafana/data package
|
||||
const roundInterval: (interval: number) => number = rangeUtil?.roundInterval || kbn.roundInterval || kbn.round_interval;
|
||||
|
||||
/**
|
||||
* Zabbix API Wrapper.
|
||||
* Creates Zabbix API instance with given parameters (url, credentials and other).
|
||||
* Wraps API calls and provides high-level methods.
|
||||
*/
|
||||
export class ZabbixAPIConnector {
|
||||
backendAPIUrl: string;
|
||||
requestOptions: { basicAuth: any; withCredentials: boolean };
|
||||
getTrend: (items: any, timeFrom: any, timeTill: any) => Promise<any[]>;
|
||||
version: string;
|
||||
getVersionPromise: Promise<string>;
|
||||
datasourceId: number;
|
||||
|
||||
constructor(basicAuth: any, withCredentials: boolean, datasourceId: number) {
|
||||
this.datasourceId = datasourceId;
|
||||
this.backendAPIUrl = `/api/datasources/${this.datasourceId}/resources/zabbix-api`;
|
||||
|
||||
this.requestOptions = {
|
||||
basicAuth: basicAuth,
|
||||
withCredentials: withCredentials,
|
||||
};
|
||||
|
||||
this.getTrend = this.getTrend_ZBXNEXT1193;
|
||||
//getTrend = getTrend_30;
|
||||
|
||||
this.initVersion();
|
||||
}
|
||||
|
||||
//////////////////////////
|
||||
// Core method wrappers //
|
||||
//////////////////////////
|
||||
|
||||
request(method: string, params?: any) {
|
||||
if (!this.version) {
|
||||
return this.initVersion().then(() => this.request(method, params));
|
||||
}
|
||||
|
||||
return this.backendAPIRequest(method, params);
|
||||
}
|
||||
|
||||
async backendAPIRequest(method: string, params: any = {}) {
|
||||
const requestOptions: BackendSrvRequest = {
|
||||
url: this.backendAPIUrl,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
hideFromInspector: false,
|
||||
data: {
|
||||
datasourceId: this.datasourceId,
|
||||
method,
|
||||
params,
|
||||
},
|
||||
};
|
||||
|
||||
// Set request options for basic auth
|
||||
if (this.requestOptions.basicAuth || this.requestOptions.withCredentials) {
|
||||
requestOptions.withCredentials = true;
|
||||
}
|
||||
if (this.requestOptions.basicAuth) {
|
||||
requestOptions.headers.Authorization = this.requestOptions.basicAuth;
|
||||
}
|
||||
|
||||
const response = await getBackendSrv().fetch<any>(requestOptions).toPromise();
|
||||
return response?.data?.result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Zabbix API version
|
||||
*/
|
||||
getVersion() {
|
||||
return this.backendAPIRequest('apiinfo.version');
|
||||
}
|
||||
|
||||
initVersion(): Promise<string> {
|
||||
if (!this.getVersionPromise) {
|
||||
this.getVersionPromise = Promise.resolve(
|
||||
this.getVersion().then((version) => {
|
||||
if (version) {
|
||||
console.log(`Zabbix version detected: ${version}`);
|
||||
} else {
|
||||
console.log(`Failed to detect Zabbix version, use default ${DEFAULT_ZABBIX_VERSION}`);
|
||||
}
|
||||
|
||||
this.version = version || DEFAULT_ZABBIX_VERSION;
|
||||
this.getVersionPromise = null;
|
||||
return version;
|
||||
})
|
||||
);
|
||||
}
|
||||
return this.getVersionPromise;
|
||||
}
|
||||
|
||||
isZabbix54OrHigher() {
|
||||
return semver.gte(this.version, '5.4.0');
|
||||
}
|
||||
|
||||
////////////////////////////////
|
||||
// Zabbix API method wrappers //
|
||||
////////////////////////////////
|
||||
|
||||
/**
 * Acknowledge a Zabbix event, optionally changing its severity.
 *
 * When no action is given, defaults to "add message" on Zabbix >= 4.0 and to
 * the no-op action on older versions.
 */
acknowledgeEvent(eventid: string, message: string, action?: number, severity?: number) {
  // BUG FIX: the original used `if (!action)`, which also matched an explicit
  // action of 0 (ZBX_ACK_ACTION_NONE) and silently replaced it with the
  // default. Only substitute the default when the caller omitted the argument.
  if (action === undefined) {
    action = semver.gte(this.version, '4.0.0') ? ZBX_ACK_ACTION_ADD_MESSAGE : ZBX_ACK_ACTION_NONE;
  }

  const params: any = {
    eventids: eventid,
    message: message,
    action: action,
  };

  if (severity !== undefined) {
    params.severity = severity;
  }

  return this.request('event.acknowledge', params);
}
|
||||
|
||||
getGroups() {
|
||||
const params = {
|
||||
output: ['name', 'groupid'],
|
||||
sortfield: 'name',
|
||||
real_hosts: true,
|
||||
};
|
||||
|
||||
return this.request('hostgroup.get', params);
|
||||
}
|
||||
|
||||
getHosts(groupids) {
|
||||
const params: any = {
|
||||
output: ['hostid', 'name', 'host'],
|
||||
sortfield: 'name',
|
||||
};
|
||||
if (groupids) {
|
||||
params.groupids = groupids;
|
||||
}
|
||||
|
||||
return this.request('host.get', params);
|
||||
}
|
||||
|
||||
async getApps(hostids): Promise<any[]> {
|
||||
if (this.isZabbix54OrHigher()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const params = {
|
||||
output: 'extend',
|
||||
hostids: hostids,
|
||||
};
|
||||
|
||||
return this.request('application.get', params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Zabbix items
|
||||
* @param {[type]} hostids host ids
|
||||
* @param {[type]} appids application ids
|
||||
* @param {String} itemtype 'num' or 'text'
|
||||
* @return {[type]} array of items
|
||||
*/
|
||||
getItems(hostids, appids, itemtype) {
|
||||
const params: any = {
|
||||
output: ['itemid', 'name', 'key_', 'value_type', 'hostid', 'status', 'state', 'units', 'valuemapid', 'delay'],
|
||||
sortfield: 'name',
|
||||
webitems: true,
|
||||
filter: {},
|
||||
selectHosts: ['hostid', 'name', 'host'],
|
||||
};
|
||||
if (hostids) {
|
||||
params.hostids = hostids;
|
||||
}
|
||||
if (appids) {
|
||||
params.applicationids = appids;
|
||||
}
|
||||
if (itemtype === 'num') {
|
||||
// Return only numeric metrics
|
||||
params.filter.value_type = [0, 3];
|
||||
}
|
||||
if (itemtype === 'text') {
|
||||
// Return only text metrics
|
||||
params.filter.value_type = [1, 2, 4];
|
||||
}
|
||||
|
||||
if (this.isZabbix54OrHigher()) {
|
||||
params.selectTags = 'extend';
|
||||
}
|
||||
|
||||
return this.request('item.get', params).then(utils.expandItems);
|
||||
}
|
||||
|
||||
getItemsByIDs(itemids) {
|
||||
const params: any = {
|
||||
itemids: itemids,
|
||||
output: ['itemid', 'name', 'key_', 'value_type', 'hostid', 'status', 'state', 'units', 'valuemapid', 'delay'],
|
||||
webitems: true,
|
||||
selectHosts: ['hostid', 'name'],
|
||||
};
|
||||
|
||||
if (this.isZabbix54OrHigher()) {
|
||||
params.selectTags = 'extend';
|
||||
}
|
||||
|
||||
return this.request('item.get', params).then((items) => utils.expandItems(items));
|
||||
}
|
||||
|
||||
getMacros(hostids) {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
hostids: hostids,
|
||||
};
|
||||
|
||||
return this.request('usermacro.get', params);
|
||||
}
|
||||
|
||||
getGlobalMacros() {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
globalmacro: true,
|
||||
};
|
||||
|
||||
return this.request('usermacro.get', params);
|
||||
}
|
||||
|
||||
getLastValue(itemid) {
|
||||
const params = {
|
||||
output: ['lastvalue'],
|
||||
itemids: itemid,
|
||||
};
|
||||
return this.request('item.get', params).then((items) => (items.length ? items[0].lastvalue : null));
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform history query from Zabbix API
|
||||
*
|
||||
* @param {Array} items Array of Zabbix item objects
|
||||
* @param {Number} timeFrom Time in seconds
|
||||
* @param {Number} timeTill Time in seconds
|
||||
* @return {Array} Array of Zabbix history objects
|
||||
*/
|
||||
getHistory(items, timeFrom, timeTill) {
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid');
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
history: value_type,
|
||||
itemids: itemids,
|
||||
sortfield: 'clock',
|
||||
sortorder: 'ASC',
|
||||
time_from: timeFrom,
|
||||
};
|
||||
|
||||
// Relative queries (e.g. last hour) don't include an end time
|
||||
if (timeTill) {
|
||||
params.time_till = timeTill;
|
||||
}
|
||||
|
||||
return this.request('history.get', params);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then(_.flatten);
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform trends query from Zabbix API
|
||||
* Use trends api extension from ZBXNEXT-1193 patch.
|
||||
*
|
||||
* @param {Array} items Array of Zabbix item objects
|
||||
* @param {Number} time_from Time in seconds
|
||||
* @param {Number} time_till Time in seconds
|
||||
* @return {Array} Array of Zabbix trend objects
|
||||
*/
|
||||
getTrend_ZBXNEXT1193(items, timeFrom, timeTill) {
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid');
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
trend: value_type,
|
||||
itemids: itemids,
|
||||
sortfield: 'clock',
|
||||
sortorder: 'ASC',
|
||||
time_from: timeFrom,
|
||||
};
|
||||
|
||||
// Relative queries (e.g. last hour) don't include an end time
|
||||
if (timeTill) {
|
||||
params.time_till = timeTill;
|
||||
}
|
||||
|
||||
return this.request('trend.get', params);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then(_.flatten);
|
||||
}
|
||||
|
||||
getTrend_30(items, time_from, time_till, value_type) {
|
||||
const self = this;
|
||||
const itemids = _.map(items, 'itemid');
|
||||
|
||||
const params: any = {
|
||||
output: ['itemid', 'clock', value_type],
|
||||
itemids: itemids,
|
||||
time_from: time_from,
|
||||
};
|
||||
|
||||
// Relative queries (e.g. last hour) don't include an end time
|
||||
if (time_till) {
|
||||
params.time_till = time_till;
|
||||
}
|
||||
|
||||
return self.request('trend.get', params);
|
||||
}
|
||||
|
||||
getITService(serviceids?) {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
serviceids: serviceids,
|
||||
};
|
||||
return this.request('service.get', params);
|
||||
}
|
||||
|
||||
getSLA(serviceids, timeRange, options) {
|
||||
const [timeFrom, timeTo] = timeRange;
|
||||
let intervals = [{ from: timeFrom, to: timeTo }];
|
||||
if (options.slaInterval === 'auto') {
|
||||
const interval = getSLAInterval(options.intervalMs);
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
} else if (options.slaInterval !== 'none') {
|
||||
const interval = utils.parseInterval(options.slaInterval) / 1000;
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
}
|
||||
|
||||
const params: any = {
|
||||
serviceids,
|
||||
intervals,
|
||||
};
|
||||
|
||||
return this.request('service.getsla', params);
|
||||
}
|
||||
|
||||
async getSLA60(serviceids, timeRange, options) {
|
||||
const [timeFrom, timeTo] = timeRange;
|
||||
let intervals = [{ from: timeFrom, to: timeTo }];
|
||||
if (options.slaInterval === 'auto') {
|
||||
const interval = getSLAInterval(options.intervalMs);
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
} else if (options.slaInterval !== 'none') {
|
||||
const interval = utils.parseInterval(options.slaInterval) / 1000;
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
}
|
||||
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
serviceids,
|
||||
};
|
||||
|
||||
const slaObjects = await this.request('sla.get', params);
|
||||
if (slaObjects.length === 0) {
|
||||
return {};
|
||||
}
|
||||
const sla = slaObjects[0];
|
||||
|
||||
// const periods = intervals.map(interval => ({
|
||||
// period_from: interval.from,
|
||||
// period_to: interval.to,
|
||||
// }));
|
||||
const sliParams: any = {
|
||||
slaid: sla.slaid,
|
||||
serviceids,
|
||||
period_from: timeFrom,
|
||||
period_to: timeTo,
|
||||
periods: Math.min(intervals.length, 100),
|
||||
};
|
||||
|
||||
const sliResponse = await this.request('sla.getsli', sliParams);
|
||||
if (sliResponse.length === 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const slaLikeResponse: any = {};
|
||||
sliResponse.serviceids.forEach((serviceid) => {
|
||||
slaLikeResponse[serviceid] = {
|
||||
sla: [],
|
||||
};
|
||||
});
|
||||
sliResponse.sli.forEach((sliItem, i) => {
|
||||
sliItem.forEach((sli, j) => {
|
||||
slaLikeResponse[sliResponse.serviceids[j]].sla.push({
|
||||
downtimeTime: sli.downtime,
|
||||
okTime: sli.uptime,
|
||||
sla: sli.sli,
|
||||
from: sliResponse.periods[i].period_from,
|
||||
to: sliResponse.periods[i].period_to,
|
||||
});
|
||||
});
|
||||
});
|
||||
return slaLikeResponse;
|
||||
}
|
||||
|
||||
getProblems(groupids, hostids, applicationids, options): Promise<ZBXProblem[]> {
|
||||
const { timeFrom, timeTo, recent, severities, limit, acknowledged, tags } = options;
|
||||
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
selectAcknowledges: 'extend',
|
||||
selectSuppressionData: 'extend',
|
||||
selectTags: 'extend',
|
||||
source: '0',
|
||||
object: '0',
|
||||
sortfield: ['eventid'],
|
||||
sortorder: 'DESC',
|
||||
evaltype: '0',
|
||||
// preservekeys: '1',
|
||||
groupids,
|
||||
hostids,
|
||||
applicationids,
|
||||
recent,
|
||||
};
|
||||
|
||||
if (severities) {
|
||||
params.severities = severities;
|
||||
}
|
||||
|
||||
if (acknowledged !== undefined) {
|
||||
params.acknowledged = acknowledged;
|
||||
}
|
||||
|
||||
if (tags) {
|
||||
params.tags = tags;
|
||||
}
|
||||
|
||||
if (limit) {
|
||||
params.limit = limit;
|
||||
}
|
||||
|
||||
if (timeFrom || timeTo) {
|
||||
params.time_from = timeFrom;
|
||||
params.time_till = timeTo;
|
||||
}
|
||||
|
||||
return this.request('problem.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
/**
 * Fetch triggers by explicit ids, with descriptions expanded and related
 * groups/hosts/items resolved.
 */
getTriggersByIds(triggerids: string[]) {
  // Related entities to resolve on each trigger.
  const selectOptions = {
    selectGroups: ['name', 'groupid'],
    selectHosts: ['hostid', 'name', 'host', 'maintenance_status', 'proxy_hostid'],
    selectItems: ['itemid', 'name', 'key_', 'lastvalue'],
    // selectLastEvent: 'extend',
    // selectTags: 'extend',
  };

  const params: any = {
    output: 'extend',
    triggerids,
    expandDescription: true,
    expandData: true,
    expandComment: true,
    monitored: true,
    skipDependent: true,
    ...selectOptions,
    preservekeys: '1',
  };

  return this.request('trigger.get', params).then(utils.mustArray);
}
|
||||
|
||||
/**
 * Fetch triggers for the given groups/hosts/applications.
 * `showTriggers` widens the value filter to include OK triggers for
 * Recent/History views; `maintenance` and the time range are optional.
 */
getTriggers(groupids, hostids, applicationids, options) {
  const { showTriggers, maintenance, timeFrom, timeTo } = options;

  const params: any = {
    output: 'extend',
    groupids,
    hostids,
    applicationids,
    expandDescription: true,
    expandData: true,
    expandComment: true,
    monitored: true,
    skipDependent: true,
    //only_true: true,
    // Default: problem state only (Problems view).
    filter: { value: 1 },
    selectGroups: ['groupid', 'name'],
    selectHosts: ['hostid', 'name', 'host', 'maintenance_status', 'proxy_hostid'],
    selectItems: ['itemid', 'name', 'key_', 'lastvalue'],
    selectLastEvent: 'extend',
    selectTags: 'extend',
  };

  // Recent/History views need both OK (0) and problem (1) triggers.
  if (showTriggers === ShowProblemTypes.Recent || showTriggers === ShowProblemTypes.History) {
    params.filter.value = [0, 1];
  }

  if (maintenance) {
    params.maintenance = true;
  }

  if (timeFrom || timeTo) {
    params.lastChangeSince = timeFrom;
    params.lastChangeTill = timeTo;
  }

  return this.request('trigger.get', params);
}
|
||||
|
||||
/**
 * Fetch events for the given trigger objectids within a time window.
 * When `limit` is set, results are capped and sorted newest-first.
 */
getEvents(objectids, timeFrom, timeTo, showEvents, limit) {
  const params: any = {
    output: 'extend',
    time_from: timeFrom,
    time_till: timeTo,
    objectids,
    select_acknowledges: 'extend',
    selectHosts: 'extend',
    value: showEvents,
  };

  if (limit) {
    // A limit only makes sense with deterministic ordering.
    Object.assign(params, { limit, sortfield: 'clock', sortorder: 'DESC' });
  }

  return this.request('event.get', params).then(utils.mustArray);
}
|
||||
|
||||
/**
 * Fetch historical trigger events for groups/hosts/applications in a time
 * window. `value` defaults to '1' (problem events) unless overridden in
 * options; severities and limit are applied when provided.
 */
getEventsHistory(groupids, hostids, applicationids, options) {
  const { timeFrom, timeTo, severities, limit, value } = options;

  const params: any = {
    output: 'extend',
    time_from: timeFrom,
    time_till: timeTo,
    value: '1',
    source: '0',
    object: '0',
    evaltype: '0',
    sortfield: ['eventid'],
    sortorder: 'DESC',
    select_acknowledges: 'extend',
    selectTags: 'extend',
    selectSuppressionData: ['maintenanceid', 'suppress_until'],
    groupids,
    hostids,
    applicationids,
    // Optional overrides — spread last so `value` replaces the '1' default.
    ...(limit ? { limit } : {}),
    ...(severities ? { severities } : {}),
    ...(value ? { value } : {}),
  };

  return this.request('event.get', params).then(utils.mustArray);
}
|
||||
|
||||
/**
 * Fetch full event objects (with acknowledges and tags) for the given ids,
 * keyed by eventid (preservekeys) and sorted newest-first.
 */
getExtendedEventData(eventids) {
  return this.request('event.get', {
    output: 'extend',
    eventids,
    preservekeys: true,
    select_acknowledges: 'extend',
    selectTags: 'extend',
    sortfield: 'clock',
    sortorder: 'DESC',
  });
}
|
||||
|
||||
/**
 * Fetch alerts (action notifications) fired for the given events, including
 * the users they were sent to.
 */
getEventAlerts(eventids) {
  return this.request('alert.get', {
    eventids,
    output: ['alertid', 'eventid', 'message', 'clock', 'error'],
    selectUsers: true,
  });
}
|
||||
|
||||
/**
 * Fetch the given events and keep only those that carry at least one
 * acknowledge entry.
 */
getAcknowledges(eventids) {
  const params = {
    output: 'extend',
    eventids,
    preservekeys: true,
    select_acknowledges: 'extend',
    sortfield: 'clock',
    sortorder: 'DESC',
  };

  // _.filter also iterates object values (response is keyed by preservekeys).
  return this.request('event.get', params).then((events) =>
    _.filter(events, (event) => event.acknowledges.length)
  );
}
|
||||
|
||||
/**
 * Fetch triggers (alerts) attached to the given items, with last event data;
 * optionally bounded by last-change time range.
 */
getAlerts(itemids, timeFrom, timeTo) {
  const params: any = {
    output: 'extend',
    itemids,
    expandDescription: true,
    expandData: true,
    expandComment: true,
    monitored: true,
    skipDependent: true,
    //only_true: true,
    // filter: {
    //   value: 1
    // },
    selectLastEvent: 'extend',
    ...(timeFrom || timeTo ? { lastChangeSince: timeFrom, lastChangeTill: timeTo } : {}),
  };

  return this.request('trigger.get', params);
}
|
||||
|
||||
/**
 * Fetch active triggers for hosts (optionally restricted by applications,
 * minimum severity and last-change range). With `count` set, returns a
 * number instead of the trigger list; acknowledge filtering (ack 0/1) is
 * done client-side, so it forces a full fetch even when counting.
 */
getHostAlerts(hostids, applicationids, options) {
  const { minSeverity, acknowledged, count, timeFrom, timeTo } = options;

  // Ack filtering happens client-side, so server-side countOutput can only
  // be used when no ack filter is requested.
  const filterByAck = acknowledged === 0 || acknowledged === 1;

  const params: any = {
    output: 'extend',
    hostids,
    min_severity: minSeverity,
    filter: { value: 1 },
    expandDescription: true,
    expandData: true,
    expandComment: true,
    monitored: true,
    skipDependent: true,
    selectLastEvent: 'extend',
    selectGroups: 'extend',
    selectHosts: ['hostid', 'host', 'name'],
  };

  if (count && !filterByAck) {
    params.countOutput = true;
  }

  if (applicationids && applicationids.length) {
    params.applicationids = applicationids;
  }

  if (timeFrom || timeTo) {
    params.lastChangeSince = timeFrom;
    params.lastChangeTill = timeTo;
  }

  return this.request('trigger.get', params).then((triggers) => {
    if (count && !filterByAck) {
      // Server already returned a count via countOutput.
      return triggers;
    }
    triggers = filterTriggersByAcknowledge(triggers, acknowledged);
    return count ? triggers.length : triggers;
  });
}
|
||||
|
||||
/** List all proxies (id and host name only). */
getProxies() {
  return this.request('proxy.get', { output: ['proxyid', 'host'] });
}
|
||||
|
||||
getScripts(hostids: string[], options?: any): Promise<ZBXScript[]> {
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
hostids,
|
||||
};
|
||||
|
||||
return this.request('script.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
executeScript(hostid: string, scriptid: string): Promise<APIExecuteScriptResponse> {
|
||||
const params: any = {
|
||||
hostid,
|
||||
scriptid,
|
||||
};
|
||||
|
||||
return this.request('script.execute', params);
|
||||
}
|
||||
|
||||
/** Fetch all value maps together with their individual mappings. */
getValueMappings() {
  return this.request('valuemap.get', {
    output: 'extend',
    selectMappings: 'extend',
  });
}
|
||||
}
|
||||
|
||||
function filterTriggersByAcknowledge(triggers, acknowledged) {
|
||||
if (acknowledged === 0) {
|
||||
return _.filter(triggers, (trigger) => trigger.lastEvent.acknowledged === '0');
|
||||
} else if (acknowledged === 1) {
|
||||
return _.filter(triggers, (trigger) => trigger.lastEvent.acknowledged === '1');
|
||||
} else {
|
||||
return triggers;
|
||||
}
|
||||
}
|
||||
|
||||
function getSLAInterval(intervalMs) {
|
||||
// Too many intervals may cause significant load on the database, so decrease number of resulting points
|
||||
const resolutionRatio = 100;
|
||||
const interval = roundInterval(intervalMs * resolutionRatio) / 1000;
|
||||
return Math.max(interval, MIN_SLA_INTERVAL);
|
||||
}
|
||||
|
||||
function buildSLAIntervals(timeRange, interval) {
|
||||
let [timeFrom, timeTo] = timeRange;
|
||||
const intervals = [];
|
||||
|
||||
// Align time range with calculated interval
|
||||
timeFrom = Math.floor(timeFrom / interval) * interval;
|
||||
timeTo = Math.ceil(timeTo / interval) * interval;
|
||||
|
||||
for (let i = timeFrom; i <= timeTo - interval; i += interval) {
|
||||
intervals.push({
|
||||
from: i,
|
||||
to: i + interval,
|
||||
});
|
||||
}
|
||||
|
||||
return intervals;
|
||||
}
|
||||
|
||||
// Define zabbix API exception type
|
||||
export class ZabbixAPIError {
|
||||
code: number;
|
||||
name: string;
|
||||
data: string;
|
||||
message: string;
|
||||
|
||||
constructor(error: JSONRPCError) {
|
||||
this.code = error.code || null;
|
||||
this.name = error.message || '';
|
||||
this.data = error.data || '';
|
||||
this.message = 'Zabbix API Error: ' + this.name + ' ' + this.data;
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.name + ' ' + this.data;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user