Build plugin with grafana toolkit (#1539)
* Use grafana toolkit template for building plugin * Fix linter and type errors * Update styles building * Fix sass deprecation warning * Remove empty js files produced by webpack building sass * Fix signing script * Replace classnames with cx * Fix data source config page * Use custom webpack config instead of overriding original one * Use gpx_ prefix for plugin executable * Remove unused configs * Roll back react hooks dependencies usage * Move plugin-specific ts config to root config file * Temporary do not use rst2html for function description tooltip * Remove unused code * remove unused dependencies * update react table dependency * Migrate tests to typescript * remove unused dependencies * Remove old webpack configs * Add sign target to makefile * Add magefile * Update CI test job * Update go packages * Update build instructions * Downgrade go version to 1.18 * Fix go version in ci * Fix metric picker * Add comment to webpack config * remove angular mocks * update bra config * Rename datasource-zabbix to datasource (fix mage build) * Add instructions for building backend with mage * Fix webpack targets * Fix ci backend tests * Add initial e2e tests * Fix e2e ci tests * Update docker compose for cypress tests * build grafana docker image * Fix docker stop task * CI: add Grafana compatibility check
This commit is contained in:
195
src/datasource/components/AnnotationQueryEditor.tsx
Normal file
195
src/datasource/components/AnnotationQueryEditor.tsx
Normal file
@@ -0,0 +1,195 @@
|
||||
import _ from 'lodash';
|
||||
import React, { useEffect, FormEvent } from 'react';
|
||||
import { useAsyncFn } from 'react-use';
|
||||
import { AnnotationQuery, SelectableValue } from '@grafana/data';
|
||||
import { InlineField, InlineSwitch, Input, Select } from '@grafana/ui';
|
||||
import { ZabbixMetricsQuery } from '../types';
|
||||
import { ZabbixQueryEditorProps } from './QueryEditor';
|
||||
import { QueryEditorRow } from './QueryEditor/QueryEditorRow';
|
||||
import { MetricPicker } from '../../components';
|
||||
import { getVariableOptions } from './QueryEditor/utils';
|
||||
import { prepareAnnotation } from '../migrations';
|
||||
|
||||
const severityOptions: Array<SelectableValue<number>> = [
|
||||
{ value: 0, label: 'Not classified' },
|
||||
{ value: 1, label: 'Information' },
|
||||
{ value: 2, label: 'Warning' },
|
||||
{ value: 3, label: 'Average' },
|
||||
{ value: 4, label: 'High' },
|
||||
{ value: 5, label: 'Disaster' },
|
||||
];
|
||||
|
||||
// Query editor props extended with the annotation being edited and a change callback.
type Props = ZabbixQueryEditorProps & {
  // Annotation to edit; may arrive in a legacy shape — migrated via prepareAnnotation().
  annotation?: AnnotationQuery<ZabbixMetricsQuery>;
  // Invoked with the whole annotation object whenever its target query changes.
  onAnnotationChange?: (annotation: AnnotationQuery<ZabbixMetricsQuery>) => void;
};
||||
|
||||
export const AnnotationQueryEditor = ({ annotation, onAnnotationChange, datasource }: Props) => {
|
||||
annotation = prepareAnnotation(annotation);
|
||||
const query = annotation.target;
|
||||
|
||||
const loadGroupOptions = async () => {
|
||||
const groups = await datasource.zabbix.getAllGroups();
|
||||
const options = groups?.map((group) => ({
|
||||
value: group.name,
|
||||
label: group.name,
|
||||
}));
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: groupsLoading, value: groupsOptions }, fetchGroups] = useAsyncFn(async () => {
|
||||
const options = await loadGroupOptions();
|
||||
return options;
|
||||
}, []);
|
||||
|
||||
const loadHostOptions = async (group: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hosts = await datasource.zabbix.getAllHosts(groupFilter);
|
||||
let options: Array<SelectableValue<string>> = hosts?.map((host) => ({
|
||||
value: host.name,
|
||||
label: host.name,
|
||||
}));
|
||||
options = _.uniqBy(options, (o) => o.value);
|
||||
options.unshift({ value: '/.*/' });
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: hostsLoading, value: hostOptions }, fetchHosts] = useAsyncFn(async () => {
|
||||
const options = await loadHostOptions(query.group.filter);
|
||||
return options;
|
||||
}, [query.group.filter]);
|
||||
|
||||
const loadAppOptions = async (group: string, host: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hostFilter = datasource.replaceTemplateVars(host);
|
||||
const apps = await datasource.zabbix.getAllApps(groupFilter, hostFilter);
|
||||
let options: Array<SelectableValue<string>> = apps?.map((app) => ({
|
||||
value: app.name,
|
||||
label: app.name,
|
||||
}));
|
||||
options = _.uniqBy(options, (o) => o.value);
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: appsLoading, value: appOptions }, fetchApps] = useAsyncFn(async () => {
|
||||
const options = await loadAppOptions(query.group.filter, query.host.filter);
|
||||
return options;
|
||||
}, [query.group.filter, query.host.filter]);
|
||||
|
||||
// Update suggestions on every metric change
|
||||
const groupFilter = datasource.replaceTemplateVars(query.group?.filter);
|
||||
const hostFilter = datasource.replaceTemplateVars(query.host?.filter);
|
||||
|
||||
useEffect(() => {
|
||||
fetchGroups();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
fetchHosts();
|
||||
}, [groupFilter]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchApps();
|
||||
}, [groupFilter, hostFilter]);
|
||||
|
||||
const onChange = (query: any) => {
|
||||
onAnnotationChange({
|
||||
...annotation,
|
||||
target: query,
|
||||
});
|
||||
};
|
||||
|
||||
const onFilterChange = (prop: string) => {
|
||||
return (value: string) => {
|
||||
if (value !== null) {
|
||||
onChange({ ...query, [prop]: { filter: value } });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const onTextFilterChange = (prop: string) => {
|
||||
return (v: FormEvent<HTMLInputElement>) => {
|
||||
const newValue = v?.currentTarget?.value;
|
||||
if (newValue !== null) {
|
||||
onChange({ ...query, [prop]: { filter: newValue } });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const onMinSeverityChange = (option: SelectableValue) => {
|
||||
if (option.value !== null) {
|
||||
onChange({ ...query, options: { ...query.options, minSeverity: option.value } });
|
||||
}
|
||||
};
|
||||
|
||||
const onOptionSwitch = (prop: string) => () => {
|
||||
onChange({ ...query, options: { ...query.options, [prop]: !query.options[prop] } });
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Group" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.group?.filter}
|
||||
options={groupsOptions}
|
||||
isLoading={groupsLoading}
|
||||
onChange={onFilterChange('group')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Host" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.host?.filter}
|
||||
options={hostOptions}
|
||||
isLoading={hostsLoading}
|
||||
onChange={onFilterChange('host')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Application" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.application?.filter}
|
||||
options={appOptions}
|
||||
isLoading={appsLoading}
|
||||
onChange={onFilterChange('application')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Problem" labelWidth={12}>
|
||||
<Input
|
||||
width={24}
|
||||
defaultValue={query.trigger?.filter}
|
||||
placeholder="Problem name"
|
||||
onBlur={onTextFilterChange('trigger')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
<>
|
||||
<InlineField label="Min severity" labelWidth={12}>
|
||||
<Select
|
||||
isSearchable={false}
|
||||
width={24}
|
||||
value={query.options?.minSeverity}
|
||||
options={severityOptions}
|
||||
onChange={onMinSeverityChange}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Show OK events" labelWidth={24}>
|
||||
<InlineSwitch value={query.options.showOkEvents} onChange={onOptionSwitch('showOkEvents')} />
|
||||
</InlineField>
|
||||
<InlineField label="Hide acknowledged events" labelWidth={24}>
|
||||
<InlineSwitch value={query.options.hideAcknowledged} onChange={onOptionSwitch('hideAcknowledged')} />
|
||||
</InlineField>
|
||||
<InlineField label="Show hostname" labelWidth={24}>
|
||||
<InlineSwitch value={query.options.showHostname} onChange={onOptionSwitch('showHostname')} />
|
||||
</InlineField>
|
||||
</>
|
||||
</>
|
||||
);
|
||||
};
|
||||
345
src/datasource/components/ConfigEditor.tsx
Normal file
345
src/datasource/components/ConfigEditor.tsx
Normal file
@@ -0,0 +1,345 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
import { DataSourcePluginOptionsEditorProps, DataSourceSettings, SelectableValue } from '@grafana/data';
|
||||
import { Button, DataSourceHttpSettings, InlineFormLabel, LegacyForms, Select } from '@grafana/ui';
|
||||
import { ZabbixDSOptions, ZabbixSecureJSONData } from '../types';
|
||||
|
||||
const { FormField, Switch } = LegacyForms;
|
||||
|
||||
const SUPPORTED_SQL_DS = ['mysql', 'postgres', 'influxdb'];
|
||||
|
||||
export type Props = DataSourcePluginOptionsEditorProps<ZabbixDSOptions, ZabbixSecureJSONData>;

/**
 * Datasource configuration page: HTTP settings, Zabbix API credentials, trends,
 * cache/timeout, optional Direct DB Connection and misc toggles.
 */
export const ConfigEditor = (props: Props) => {
  const { options, onOptionsChange } = props;

  // Currently selected Direct DB datasource (as a Select option) and its plugin type.
  const [selectedDBDatasource, setSelectedDBDatasource] = useState(null);
  const [currentDSType, setCurrentDSType] = useState('');

  // Apply some defaults on initial render
  useEffect(() => {
    const { jsonData, secureJsonFields } = options;

    // Set secureJsonFields.password to password and then remove it from config
    // NOTE(review): this mutates the `options` prop in place before calling
    // onOptionsChange — looks like a legacy-migration shortcut; confirm intended.
    const { password, ...restJsonData } = jsonData;
    if (!secureJsonFields?.password) {
      if (!options.secureJsonData) {
        options.secureJsonData = {};
      }
      options.secureJsonData.password = password;
    }

    // Merge defaults under the existing jsonData (restJsonData wins over defaults).
    onOptionsChange({
      ...options,
      jsonData: {
        trends: true,
        trendsFrom: '',
        trendsRange: '',
        cacheTTL: '',
        timeout: undefined,
        disableDataAlignment: false,
        ...restJsonData,
      },
    });

    if (options.jsonData.dbConnectionEnable) {
      if (!options.jsonData.dbConnectionDatasourceId) {
        // Legacy config stored the datasource by name — resolve it to an id once.
        const dsName = options.jsonData.dbConnectionDatasourceName;
        getDataSourceSrv()
          .get(dsName)
          .then((ds) => {
            if (ds) {
              // NOTE(review): find() may return undefined if the datasource is no
              // longer in the supported list — the dereference below would throw.
              const selectedDs = getDirectDBDatasources().find((dsOption) => dsOption.id === ds.id);
              setSelectedDBDatasource({ label: selectedDs.name, value: selectedDs.id });
              setCurrentDSType(selectedDs.type);
              onOptionsChange({
                ...options,
                jsonData: {
                  ...options.jsonData,
                  dbConnectionDatasourceId: ds.id,
                },
              });
            }
          });
      } else {
        // Id already stored — just restore the local selection state.
        // NOTE(review): same unguarded find() as above; verify the id still exists.
        const selectedDs = getDirectDBDatasources().find(
          (dsOption) => dsOption.id === options.jsonData.dbConnectionDatasourceId
        );
        setSelectedDBDatasource({ label: selectedDs.name, value: selectedDs.id });
        setCurrentDSType(selectedDs.type);
      }
    }
  }, []);

  return (
    <>
      <DataSourceHttpSettings
        defaultUrl={'http://localhost/zabbix/api_jsonrpc.php'}
        dataSourceConfig={options}
        showAccessOptions={true}
        onChange={onOptionsChange}
      />

      <div className="gf-form-group">
        <h3 className="page-heading">Zabbix API details</h3>
        <div className="gf-form max-width-25">
          <FormField
            labelWidth={7}
            inputWidth={15}
            label="Username"
            value={options.jsonData.username || ''}
            onChange={jsonDataChangeHandler('username', options, onOptionsChange)}
            required
          />
        </div>
        <div className="gf-form max-width-25">
          {/* Once the password is stored securely, only show a Reset button. */}
          {options.secureJsonFields?.password ? (
            <>
              <FormField
                labelWidth={7}
                inputWidth={15}
                label="Password"
                disabled={true}
                value=""
                placeholder="Configured"
              />
              <Button onClick={resetSecureJsonField('password', options, onOptionsChange)}>Reset</Button>
            </>
          ) : (
            <FormField
              labelWidth={7}
              inputWidth={15}
              label="Password"
              type="password"
              value={options.secureJsonData?.password || options.jsonData.password || ''}
              onChange={secureJsonDataChangeHandler('password', options, onOptionsChange)}
              required
            />
          )}
        </div>
        <Switch
          label="Trends"
          labelClass="width-7"
          checked={options.jsonData.trends}
          onChange={jsonDataSwitchHandler('trends', options, onOptionsChange)}
        />
        {options.jsonData.trends && (
          <>
            <div className="gf-form">
              <FormField
                labelWidth={7}
                inputWidth={4}
                label="After"
                value={options.jsonData.trendsFrom || ''}
                placeholder="7d"
                onChange={jsonDataChangeHandler('trendsFrom', options, onOptionsChange)}
                tooltip="Time after which trends will be used.
                Best practice is to set this value to your history storage period (7d, 30d, etc)."
              />
            </div>
            <div className="gf-form">
              <FormField
                labelWidth={7}
                inputWidth={4}
                label="Range"
                value={options.jsonData.trendsRange || ''}
                placeholder="4d"
                onChange={jsonDataChangeHandler('trendsRange', options, onOptionsChange)}
                tooltip="Time range width after which trends will be used instead of history.
                It's better to set this value in range of 4 to 7 days to prevent loading large amount of history data."
              />
            </div>
          </>
        )}
        <div className="gf-form">
          <FormField
            labelWidth={7}
            inputWidth={4}
            label="Cache TTL"
            value={options.jsonData.cacheTTL || ''}
            placeholder="1h"
            onChange={jsonDataChangeHandler('cacheTTL', options, onOptionsChange)}
            tooltip="Zabbix data source caches metric names in memory. Specify how often data will be updated."
          />
        </div>
        <div className="gf-form">
          <FormField
            labelWidth={7}
            inputWidth={4}
            type="number"
            label="Timeout"
            value={options.jsonData.timeout}
            onChange={(event) => {
              onOptionsChange({
                ...options,
                jsonData: { ...options.jsonData, timeout: parseInt(event.currentTarget.value, 10) },
              });
            }}
            tooltip="Zabbix API connection timeout in seconds. Default is 30."
          />
        </div>
      </div>

      <div className="gf-form-group">
        <h3 className="page-heading">Direct DB Connection</h3>
        <Switch
          label="Enable"
          labelClass="width-9"
          checked={options.jsonData.dbConnectionEnable}
          onChange={jsonDataSwitchHandler('dbConnectionEnable', options, onOptionsChange)}
        />
        {options.jsonData.dbConnectionEnable && (
          <>
            <div className="gf-form">
              <InlineFormLabel width={9}>Data Source</InlineFormLabel>
              <Select
                width={32}
                options={getDirectDBDSOptions()}
                value={selectedDBDatasource}
                onChange={directDBDatasourceChanegeHandler(
                  options,
                  onOptionsChange,
                  setSelectedDBDatasource,
                  setCurrentDSType
                )}
              />
            </div>
            {/* Retention policy only applies to InfluxDB-backed history. */}
            {currentDSType === 'influxdb' && (
              <div className="gf-form">
                <FormField
                  labelWidth={9}
                  inputWidth={16}
                  label="Retention Policy"
                  value={options.jsonData.dbConnectionRetentionPolicy || ''}
                  placeholder="Retention policy name"
                  onChange={jsonDataChangeHandler('dbConnectionRetentionPolicy', options, onOptionsChange)}
                  tooltip="Specify retention policy name for fetching long-term stored data (optional).
                  Leave it blank if only default retention policy used."
                />
              </div>
            )}
          </>
        )}
      </div>

      <div className="gf-form-group">
        <h3 className="page-heading">Other</h3>
        <Switch
          label="Disable acknowledges for read-only users"
          labelClass="width-16"
          checked={options.jsonData.disableReadOnlyUsersAck}
          onChange={jsonDataSwitchHandler('disableReadOnlyUsersAck', options, onOptionsChange)}
        />
        <Switch
          label="Disable data alignment"
          labelClass="width-16"
          checked={!!options.jsonData.disableDataAlignment}
          onChange={jsonDataSwitchHandler('disableDataAlignment', options, onOptionsChange)}
          tooltip="Data alignment feature aligns points based on item update interval.
          For instance, if value collected once per minute, then timestamp of the each point will be set to the start of corresponding minute.
          This alignment required for proper work of the stacked graphs.
          If you don't need stacked graphs and want to get exactly the same timestamps as in Zabbix, then you can disable this feature."
        />
      </div>
    </>
  );
};
|
||||
|
||||
const jsonDataChangeHandler =
|
||||
(
|
||||
key: keyof ZabbixDSOptions,
|
||||
value: DataSourceSettings<ZabbixDSOptions, ZabbixSecureJSONData>,
|
||||
onChange: Props['onOptionsChange']
|
||||
) =>
|
||||
(event: React.SyntheticEvent<HTMLInputElement | HTMLSelectElement>) => {
|
||||
onChange({
|
||||
...value,
|
||||
jsonData: {
|
||||
...value.jsonData,
|
||||
[key]: event.currentTarget.value,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const jsonDataSwitchHandler =
|
||||
(
|
||||
key: keyof ZabbixDSOptions,
|
||||
value: DataSourceSettings<ZabbixDSOptions, ZabbixSecureJSONData>,
|
||||
onChange: Props['onOptionsChange']
|
||||
) =>
|
||||
(event: React.SyntheticEvent<HTMLInputElement>) => {
|
||||
onChange({
|
||||
...value,
|
||||
jsonData: {
|
||||
...value.jsonData,
|
||||
[key]: (event.target as HTMLInputElement).checked,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const secureJsonDataChangeHandler =
|
||||
(
|
||||
key: keyof ZabbixDSOptions,
|
||||
value: DataSourceSettings<ZabbixDSOptions, ZabbixSecureJSONData>,
|
||||
onChange: Props['onOptionsChange']
|
||||
) =>
|
||||
(event: React.SyntheticEvent<HTMLInputElement | HTMLSelectElement>) => {
|
||||
onChange({
|
||||
...value,
|
||||
secureJsonData: {
|
||||
...value.secureJsonData,
|
||||
[key]: event.currentTarget.value,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const resetSecureJsonField =
|
||||
(
|
||||
key: keyof ZabbixDSOptions,
|
||||
value: DataSourceSettings<ZabbixDSOptions, ZabbixSecureJSONData>,
|
||||
onChange: Props['onOptionsChange']
|
||||
) =>
|
||||
(event: React.SyntheticEvent<HTMLButtonElement>) => {
|
||||
onChange({
|
||||
...value,
|
||||
secureJsonFields: {
|
||||
...value.secureJsonFields,
|
||||
[key]: false,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const directDBDatasourceChanegeHandler =
|
||||
(
|
||||
options: DataSourceSettings<ZabbixDSOptions, ZabbixSecureJSONData>,
|
||||
onChange: Props['onOptionsChange'],
|
||||
setSelectedDS: React.Dispatch<any>,
|
||||
setSelectedDSType: React.Dispatch<any>
|
||||
) =>
|
||||
(value: SelectableValue<number>) => {
|
||||
const selectedDs = getDirectDBDatasources().find((dsOption) => dsOption.id === value.value);
|
||||
setSelectedDS({ label: selectedDs.name, value: selectedDs.id });
|
||||
setSelectedDSType(selectedDs.type);
|
||||
onChange({
|
||||
...options,
|
||||
jsonData: {
|
||||
...options.jsonData,
|
||||
dbConnectionDatasourceId: value.value,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const getDirectDBDatasources = () => {
|
||||
let dsList = (getDataSourceSrv() as any).getAll();
|
||||
dsList = dsList.filter((ds) => SUPPORTED_SQL_DS.includes(ds.type));
|
||||
return dsList;
|
||||
};
|
||||
|
||||
const getDirectDBDSOptions = () => {
|
||||
const dsList = getDirectDBDatasources();
|
||||
const dsOpts: Array<SelectableValue<number>> = dsList.map((ds) => ({
|
||||
label: ds.name,
|
||||
value: ds.id,
|
||||
description: ds.type,
|
||||
}));
|
||||
return dsOpts;
|
||||
};
|
||||
@@ -0,0 +1,73 @@
|
||||
import { css, cx } from '@emotion/css';
|
||||
import React, { useMemo, useState } from 'react';
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { Button, ClickOutsideWrapper, Icon, Input, Menu, useStyles2, useTheme2 } from '@grafana/ui';
|
||||
import { FuncDef } from '../../types';
|
||||
import { getCategories } from '../../metricFunctions';
|
||||
|
||||
// import { mapFuncDefsToSelectables } from './helpers';
|
||||
|
||||
type Props = {
  // funcDefs: MetricFunc;
  // Called with the chosen function definition when the user picks one from the menu.
  onFuncAdd: (def: FuncDef) => void;
};
|
||||
|
||||
/**
 * "+" button that expands into a searchable menu of metric functions grouped by
 * category. Picking an item calls onFuncAdd and collapses the menu.
 */
export function AddZabbixFunction({ onFuncAdd }: Props) {
  const [showMenu, setShowMenu] = useState(false);
  const styles = useStyles2(getStyles);
  const theme = useTheme2();

  // Forward the selection and close the menu.
  const onFuncAddInternal = (def: FuncDef) => {
    onFuncAdd(def);
    setShowMenu(false);
  };

  // NOTE(review): search is a stub — it only logs the input and does not filter
  // the menu; the console.log looks like leftover debug output. TODO implement.
  const onSearch = (e: React.FormEvent<HTMLInputElement>) => {
    console.log(e.currentTarget.value);
  };

  const onClickOutside = () => {
    setShowMenu(false);
  };

  // NOTE(review): memo deps list onFuncAdd while the callback used is
  // onFuncAddInternal (recreated each render); per the commit this hook-deps
  // behavior was deliberately rolled back — confirm before "fixing".
  const menuItems = useMemo(() => buildMenuItems(onFuncAddInternal), [onFuncAdd]);

  return (
    <div>
      {!showMenu && (
        <Button
          icon="plus"
          variant="secondary"
          className={cx(styles.button)}
          aria-label="Add new function"
          onClick={() => setShowMenu(!showMenu)}
        />
      )}
      {showMenu && (
        <ClickOutsideWrapper onClick={onClickOutside} useCapture>
          <Input onChange={onSearch} suffix={<Icon name="search" />} />
          <Menu style={{ position: 'absolute', zIndex: theme.zIndex.dropdown }}>{menuItems}</Menu>
        </ClickOutsideWrapper>
      )}
    </div>
  );
}
|
||||
|
||||
function buildMenuItems(onClick: (func: FuncDef) => void) {
|
||||
const categories = getCategories();
|
||||
const menuItems: JSX.Element[] = [];
|
||||
for (const categoryName in categories) {
|
||||
const functions = categories[categoryName];
|
||||
const subItems = functions.map((f) => <Menu.Item label={f.name} key={f.name} onClick={() => onClick(f)} />);
|
||||
menuItems.push(<Menu.Item label={categoryName} key={categoryName} childItems={subItems} />);
|
||||
}
|
||||
return menuItems;
|
||||
}
|
||||
|
||||
function getStyles(theme: GrafanaTheme2) {
|
||||
return {
|
||||
button: css`
|
||||
margin-right: ${theme.spacing(0.5)};
|
||||
`,
|
||||
};
|
||||
}
|
||||
50
src/datasource/components/FunctionEditor/FunctionEditor.tsx
Normal file
50
src/datasource/components/FunctionEditor/FunctionEditor.tsx
Normal file
@@ -0,0 +1,50 @@
|
||||
import { css } from '@emotion/css';
|
||||
import React from 'react';
|
||||
import { FunctionEditorControlsProps, FunctionEditorControls } from './FunctionEditorControls';
|
||||
|
||||
import { useStyles2, Tooltip } from '@grafana/ui';
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { MetricFunc } from '../../types';
|
||||
|
||||
// Control callbacks plus the function instance whose name is rendered as the trigger.
interface FunctionEditorProps extends FunctionEditorControlsProps {
  func: MetricFunc;
}
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => {
|
||||
return {
|
||||
icon: css`
|
||||
margin-right: ${theme.spacing(0.5)};
|
||||
`,
|
||||
label: css({
|
||||
fontWeight: theme.typography.fontWeightMedium,
|
||||
fontSize: theme.typography.bodySmall.fontSize, // to match .gf-form-label
|
||||
cursor: 'pointer',
|
||||
display: 'inline-block',
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
export const FunctionEditor: React.FC<FunctionEditorProps> = ({ onMoveLeft, onMoveRight, func, ...props }) => {
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
const renderContent = ({ updatePopperPosition }: any) => (
|
||||
<FunctionEditorControls
|
||||
{...props}
|
||||
func={func}
|
||||
onMoveLeft={() => {
|
||||
onMoveLeft(func);
|
||||
updatePopperPosition();
|
||||
}}
|
||||
onMoveRight={() => {
|
||||
onMoveRight(func);
|
||||
updatePopperPosition();
|
||||
}}
|
||||
/>
|
||||
);
|
||||
|
||||
return (
|
||||
<Tooltip content={renderContent} placement="top" interactive>
|
||||
<span className={styles.label}>{func.def.name}</span>
|
||||
</Tooltip>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,45 @@
|
||||
import React from 'react';
|
||||
import { Icon } from '@grafana/ui';
|
||||
import { MetricFunc } from '../../types';
|
||||
|
||||
const DOCS_FUNC_REF_URL = 'https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/';
|
||||
|
||||
// Callbacks for reordering and removing a function in the query's function chain.
export interface FunctionEditorControlsProps {
  onMoveLeft: (func: MetricFunc) => void;
  onMoveRight: (func: MetricFunc) => void;
  onRemove: (func: MetricFunc) => void;
}
|
||||
|
||||
const FunctionHelpButton = (props: { description?: string; name: string }) => {
|
||||
return (
|
||||
<Icon
|
||||
className="pointer"
|
||||
name="question-circle"
|
||||
onClick={() => {
|
||||
window.open(`${DOCS_FUNC_REF_URL}#${props.name}`, '_blank');
|
||||
}}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
// Row of controls rendered inside the function tooltip:
// move left, docs link, remove, move right.
export const FunctionEditorControls = (
  props: FunctionEditorControlsProps & {
    func: MetricFunc;
  }
) => {
  const { func, onMoveLeft, onMoveRight, onRemove } = props;
  const containerStyle: React.CSSProperties = {
    display: 'flex',
    width: '60px',
    justifyContent: 'space-between',
  };

  return (
    <div style={containerStyle}>
      <Icon className="pointer" name="arrow-left" onClick={() => onMoveLeft(func)} />
      <FunctionHelpButton name={func.def.name} description={func.def.description} />
      <Icon className="pointer" name="times" onClick={() => onRemove(func)} />
      <Icon className="pointer" name="arrow-right" onClick={() => onMoveRight(func)} />
    </div>
  );
};
|
||||
@@ -0,0 +1,77 @@
|
||||
import { css } from '@emotion/css';
|
||||
import React from 'react';
|
||||
|
||||
import { GrafanaTheme2, SelectableValue } from '@grafana/data';
|
||||
import { Segment, SegmentInput, useStyles2 } from '@grafana/ui';
|
||||
|
||||
// A function parameter in editable form, as consumed by FunctionParamEditor.
export type EditableParam = {
  name: string;
  value: string;
  optional: boolean; // may be left empty
  multiple: boolean; // parameter may be repeated
  options: Array<SelectableValue<string>>; // predefined choices; empty -> free-text input
};

type FieldEditorProps = {
  editableParam: EditableParam;
  // Called with the new string value when editing finishes.
  onChange: (value: string) => void;
  // Reports whether the underlying segment/input is currently expanded.
  onExpandedChange: (expanded: boolean) => void;
  autofocus: boolean;
};
||||
|
||||
/**
|
||||
* Render a function parameter with a segment dropdown for multiple options or simple input.
|
||||
*/
|
||||
export function FunctionParamEditor({ editableParam, onChange, onExpandedChange, autofocus }: FieldEditorProps) {
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
if (editableParam.options?.length > 0) {
|
||||
return (
|
||||
<Segment
|
||||
autofocus={autofocus}
|
||||
value={editableParam.value}
|
||||
inputPlaceholder={editableParam.name}
|
||||
className={styles.segment}
|
||||
options={editableParam.options}
|
||||
placeholder={' +' + editableParam.name}
|
||||
onChange={(value) => {
|
||||
onChange(value.value || '');
|
||||
}}
|
||||
onExpandedChange={onExpandedChange}
|
||||
inputMinWidth={150}
|
||||
allowCustomValue={true}
|
||||
allowEmptyValue={true}
|
||||
></Segment>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<SegmentInput
|
||||
autofocus={autofocus}
|
||||
className={styles.input}
|
||||
value={editableParam.value || ''}
|
||||
placeholder={' +' + editableParam.name}
|
||||
inputPlaceholder={editableParam.name}
|
||||
onChange={(value) => {
|
||||
onChange(value.toString());
|
||||
}}
|
||||
onExpandedChange={onExpandedChange}
|
||||
// input style
|
||||
style={{ height: '25px', paddingTop: '2px', marginTop: '2px', paddingLeft: '4px', minWidth: '100px' }}
|
||||
></SegmentInput>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({
|
||||
segment: css({
|
||||
margin: 0,
|
||||
padding: 0,
|
||||
}),
|
||||
input: css`
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
input {
|
||||
height: 25px;
|
||||
},
|
||||
`,
|
||||
});
|
||||
@@ -0,0 +1,90 @@
|
||||
import { css, cx } from '@emotion/css';
|
||||
import React, { useState } from 'react';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { HorizontalGroup, InlineLabel, useStyles2 } from '@grafana/ui';
|
||||
|
||||
import { FunctionEditor } from './FunctionEditor';
|
||||
import { EditableParam, FunctionParamEditor } from './FunctionParamEditor';
|
||||
import { mapFuncInstanceToParams } from './helpers';
|
||||
import { MetricFunc } from '../../types';
|
||||
|
||||
// Props for the inline function editor: the function instance plus callbacks for
// reordering, removal and per-parameter edits (index identifies the parameter).
export type FunctionEditorProps = {
  func: MetricFunc;
  onMoveLeft: (func: MetricFunc) => void;
  onMoveRight: (func: MetricFunc) => void;
  onRemove: (func: MetricFunc) => void;
  onParamChange: (func: MetricFunc, index: number, value: string) => void;
};
|
||||
|
||||
/**
 * Allows editing function params and removing/moving a function (note: editing function name is not supported)
 */
export function ZabbixFunctionEditor({ func, onMoveLeft, onMoveRight, onRemove, onParamChange }: FunctionEditorProps) {
  const styles = useStyles2(getStyles);

  // keep track of mouse over and isExpanded state to display buttons for adding optional/multiple params
  // only when the user mouse over over the function editor OR any param editor is expanded.
  const [mouseOver, setIsMouseOver] = useState(false);
  const [expanded, setIsExpanded] = useState(false);

  // Hide optional/extra params unless they have a value, the function was just
  // added, or the user is interacting with this editor (hover/expanded).
  let params = mapFuncInstanceToParams(func);
  params = params.filter((p: EditableParam, index: number) => {
    // func.added is set for newly added functions - see autofocus below
    return (index < func.def.params.length && !p.optional) || func.added || p.value || expanded || mouseOver;
  });

  return (
    <div
      className={cx(styles.container)}
      onMouseOver={() => setIsMouseOver(true)}
      onMouseLeave={() => setIsMouseOver(false)}
    >
      <HorizontalGroup spacing="none">
        <FunctionEditor func={func} onMoveLeft={onMoveLeft} onMoveRight={onMoveRight} onRemove={onRemove} />
        <InlineLabel className={styles.label}>(</InlineLabel>
        {params.map((editableParam: EditableParam, index: number) => {
          return (
            <React.Fragment key={index}>
              <FunctionParamEditor
                autofocus={index === 0 && func.added}
                editableParam={editableParam}
                onChange={(value) => {
                  {/* Empty values are only persisted for optional params. */}
                  if (value !== '' || editableParam.optional) {
                    // dispatch(actions.updateFunctionParam({ func, index, value }));
                    onParamChange(func, index, value);
                  }
                  setIsExpanded(false);
                  setIsMouseOver(false);
                }}
                onExpandedChange={setIsExpanded}
              />
              {index !== params.length - 1 ? ',' : ''}
            </React.Fragment>
          );
        })}
        <InlineLabel className={styles.label}>)</InlineLabel>
      </HorizontalGroup>
    </div>
  );
}
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({
|
||||
container: css({
|
||||
backgroundColor: theme.colors.background.secondary,
|
||||
borderRadius: theme.shape.borderRadius(),
|
||||
marginRight: theme.spacing(0.5),
|
||||
padding: `0 ${theme.spacing(1)}`,
|
||||
height: `${theme.v1.spacing.formInputHeight}px`,
|
||||
}),
|
||||
error: css`
|
||||
border: 1px solid ${theme.colors.error.main};
|
||||
`,
|
||||
label: css({
|
||||
padding: 0,
|
||||
margin: 0,
|
||||
}),
|
||||
button: css({
|
||||
padding: theme.spacing(0.5),
|
||||
}),
|
||||
});
|
||||
58
src/datasource/components/FunctionEditor/helpers.ts
Normal file
58
src/datasource/components/FunctionEditor/helpers.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { MetricFunc } from '../../types';
|
||||
|
||||
/** Static definition of a function parameter, as declared in the function metadata. */
export type ParamDef = {
  name: string;
  type: string;
  // Predefined values offered in the editor dropdown
  options?: Array<string | number>;
  // Parameter may be repeated (variadic tail)
  multiple?: boolean;
  optional?: boolean;
  // Minimum version this parameter applies to — TODO confirm semantics against consumers
  version?: string;
};

/** A parameter instance as it is edited in the function editor UI. */
export type EditableParam = {
  name: string;
  // Current value, always kept as a string in the editor
  value: string;
  optional: boolean;
  multiple: boolean;
  // Dropdown options derived from ParamDef.options plus template variables
  options: Array<SelectableValue<string>>;
};
|
||||
|
||||
function createEditableParam(paramDef: ParamDef, additional: boolean, value?: string | number): EditableParam {
|
||||
return {
|
||||
name: paramDef.name,
|
||||
value: value?.toString() || '',
|
||||
optional: !!paramDef.optional || additional, // only first param is required when multiple are allowed
|
||||
multiple: !!paramDef.multiple,
|
||||
options:
|
||||
paramDef.options?.map((option: string | number) => ({
|
||||
value: option.toString(),
|
||||
label: option.toString(),
|
||||
})) ?? [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a list of params that can be edited in the function editor.
|
||||
*/
|
||||
export function mapFuncInstanceToParams(func: MetricFunc): EditableParam[] {
|
||||
// list of required parameters (from func.def)
|
||||
const params: EditableParam[] = func.def.params.map((paramDef: ParamDef, index: number) =>
|
||||
createEditableParam(paramDef, false, func.params[index])
|
||||
);
|
||||
|
||||
// list of additional (multiple or optional) params entered by the user
|
||||
while (params.length < func.params.length) {
|
||||
const paramDef = func.def.params[func.def.params.length - 1];
|
||||
const value = func.params[params.length];
|
||||
params.push(createEditableParam(paramDef, true, value));
|
||||
}
|
||||
|
||||
// extra "fake" param to allow adding more multiple values at the end
|
||||
if (params.length && params[params.length - 1].value && params[params.length - 1]?.multiple) {
|
||||
const paramDef = func.def.params[func.def.params.length - 1];
|
||||
params.push(createEditableParam(paramDef, true, ''));
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
208
src/datasource/components/QueryEditor.tsx
Normal file
208
src/datasource/components/QueryEditor.tsx
Normal file
@@ -0,0 +1,208 @@
|
||||
import React, { useEffect } from 'react';
|
||||
import { QueryEditorProps, SelectableValue } from '@grafana/data';
|
||||
import { InlineField, InlineFieldRow, Select } from '@grafana/ui';
|
||||
import * as c from '../constants';
|
||||
import * as migrations from '../migrations';
|
||||
import { ZabbixDatasource } from '../datasource';
|
||||
import { ShowProblemTypes, ZabbixDSOptions, ZabbixMetricsQuery, ZabbixQueryOptions } from '../types';
|
||||
import { MetricsQueryEditor } from './QueryEditor/MetricsQueryEditor';
|
||||
import { QueryFunctionsEditor } from './QueryEditor/QueryFunctionsEditor';
|
||||
import { QueryOptionsEditor } from './QueryEditor/QueryOptionsEditor';
|
||||
import { TextMetricsQueryEditor } from './QueryEditor/TextMetricsQueryEditor';
|
||||
import { ProblemsQueryEditor } from './QueryEditor/ProblemsQueryEditor';
|
||||
import { ItemIdQueryEditor } from './QueryEditor/ItemIdQueryEditor';
|
||||
import { ITServicesQueryEditor } from './QueryEditor/ITServicesQueryEditor';
|
||||
import { TriggersQueryEditor } from './QueryEditor/TriggersQueryEditor';
|
||||
|
||||
// Options for the top-level "Query type" selector; values map to the MODE_*
// constants used to pick which sub-editor is rendered below.
const zabbixQueryTypeOptions: Array<SelectableValue<string>> = [
  {
    value: c.MODE_METRICS,
    label: 'Metrics',
    description: 'Query numeric metrics',
  },
  {
    value: c.MODE_TEXT,
    label: 'Text',
    description: 'Query text data',
  },
  {
    value: c.MODE_ITSERVICE,
    label: 'IT Services',
    description: 'Query IT Services data',
  },
  {
    value: c.MODE_ITEMID,
    label: 'Item Id',
    description: 'Query metrics by item ids',
  },
  {
    value: c.MODE_TRIGGERS,
    label: 'Triggers',
    description: 'Query triggers data',
  },
  {
    value: c.MODE_PROBLEMS,
    label: 'Problems',
    description: 'Query problems',
  },
];
|
||||
|
||||
const getDefaultQuery: () => Partial<ZabbixMetricsQuery> = () => ({
|
||||
queryType: c.MODE_METRICS,
|
||||
group: { filter: '' },
|
||||
host: { filter: '' },
|
||||
application: { filter: '' },
|
||||
itemTag: { filter: '' },
|
||||
item: { filter: '' },
|
||||
functions: [],
|
||||
triggers: {
|
||||
count: true,
|
||||
minSeverity: 3,
|
||||
acknowledged: 2,
|
||||
},
|
||||
trigger: { filter: '' },
|
||||
tags: { filter: '' },
|
||||
proxy: { filter: '' },
|
||||
textFilter: '',
|
||||
options: {
|
||||
showDisabledItems: false,
|
||||
skipEmptyValues: false,
|
||||
disableDataAlignment: false,
|
||||
useZabbixValueMapping: false,
|
||||
},
|
||||
table: {
|
||||
skipEmptyValues: false,
|
||||
},
|
||||
});
|
||||
|
||||
function getSLAQueryDefaults() {
|
||||
return {
|
||||
itServiceFilter: '',
|
||||
slaProperty: 'sla',
|
||||
slaInterval: 'none',
|
||||
};
|
||||
}
|
||||
|
||||
function getProblemsQueryDefaults(): Partial<ZabbixMetricsQuery> {
|
||||
return {
|
||||
showProblems: ShowProblemTypes.Problems,
|
||||
options: {
|
||||
minSeverity: 0,
|
||||
sortProblems: 'default',
|
||||
acknowledged: 2,
|
||||
hostsInMaintenance: false,
|
||||
hostProxy: false,
|
||||
limit: c.DEFAULT_ZABBIX_PROBLEMS_LIMIT,
|
||||
useTimeRange: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** Props shared by the Zabbix query editor and its sub-editors (standard Grafana query editor props). */
export interface ZabbixQueryEditorProps
  extends QueryEditorProps<ZabbixDatasource, ZabbixMetricsQuery, ZabbixDSOptions> {}
|
||||
|
||||
/**
 * Top-level query editor for the Zabbix data source.
 * Merges per-mode defaults into the incoming query, migrates legacy queries on
 * mount, and renders the sub-editor matching the selected query type.
 */
export const QueryEditor = ({ query, datasource, onChange, onRunQuery }: ZabbixQueryEditorProps) => {
  // Fill in any missing fields so sub-editors can read filters unconditionally.
  query = { ...getDefaultQuery(), ...query };
  const { queryType } = query;
  if (queryType === c.MODE_PROBLEMS || queryType === c.MODE_TRIGGERS) {
    const defaults = getProblemsQueryDefaults();
    query = { ...defaults, ...query };
    // options needs a deep merge: the shallow spread above keeps the saved
    // options object as-is, which may be missing newer default fields.
    query.options = { ...defaults.options, ...query.options };
  }
  if (queryType === c.MODE_ITSERVICE) {
    query = { ...getSLAQueryDefaults(), ...query };
  }

  // Migrate query on load (runs once on mount; intentionally not re-run on
  // query changes to avoid a migrate/onChange loop)
  useEffect(() => {
    const migratedQuery = migrations.migrate(query);
    onChange(migratedQuery);
  }, []);

  // Generic handler: set query[prop] from a select option, then run the query.
  const onPropChange = (prop: string) => {
    return (option: SelectableValue) => {
      if (option.value !== null) {
        onChangeInternal({ ...query, [prop]: option.value });
      }
    };
  };

  // Persist the query and immediately execute it.
  const onChangeInternal = (query: ZabbixMetricsQuery) => {
    onChange(query);
    onRunQuery();
  };

  const onOptionsChange = (options: ZabbixQueryOptions) => {
    onChangeInternal({ ...query, options });
  };

  const renderMetricsEditor = () => {
    return (
      <>
        <MetricsQueryEditor query={query} datasource={datasource} onChange={onChangeInternal} />
        <QueryFunctionsEditor query={query} onChange={onChangeInternal} />
      </>
    );
  };

  const renderItemIdsEditor = () => {
    return (
      <>
        <ItemIdQueryEditor query={query} onChange={onChangeInternal} />
        <QueryFunctionsEditor query={query} onChange={onChangeInternal} />
      </>
    );
  };

  const renderTextMetricsEditor = () => {
    return (
      <>
        <TextMetricsQueryEditor query={query} datasource={datasource} onChange={onChangeInternal} />
        {/* Functions are not applicable to text data; left disabled for now. */}
        {/* <QueryFunctionsEditor query={query} onChange={onChangeInternal} /> */}
      </>
    );
  };

  const renderITServicesEditor = () => {
    return (
      <>
        <ITServicesQueryEditor query={query} datasource={datasource} onChange={onChangeInternal} />
        <QueryFunctionsEditor query={query} onChange={onChangeInternal} />
      </>
    );
  };

  const renderProblemsEditor = () => {
    return <ProblemsQueryEditor query={query} datasource={datasource} onChange={onChangeInternal} />;
  };

  const renderTriggersEditor = () => {
    return <TriggersQueryEditor query={query} datasource={datasource} onChange={onChangeInternal} />;
  };

  return (
    <>
      <InlineFieldRow>
        <InlineField label="Query type" labelWidth={12}>
          <Select
            isSearchable={false}
            width={24}
            value={queryType}
            options={zabbixQueryTypeOptions}
            onChange={onPropChange('queryType')}
          />
        </InlineField>
        {/* Filler that stretches the row background to the full panel width */}
        <div className="gf-form gf-form--grow">
          <div className="gf-form-label gf-form-label--grow" />
        </div>
      </InlineFieldRow>
      {queryType === c.MODE_METRICS && renderMetricsEditor()}
      {queryType === c.MODE_ITEMID && renderItemIdsEditor()}
      {queryType === c.MODE_TEXT && renderTextMetricsEditor()}
      {queryType === c.MODE_ITSERVICE && renderITServicesEditor()}
      {queryType === c.MODE_PROBLEMS && renderProblemsEditor()}
      {queryType === c.MODE_TRIGGERS && renderTriggersEditor()}
      <QueryOptionsEditor queryType={queryType} queryOptions={query.options} onChange={onOptionsChange} />
    </>
  );
};
|
||||
102
src/datasource/components/QueryEditor/ITServicesQueryEditor.tsx
Normal file
102
src/datasource/components/QueryEditor/ITServicesQueryEditor.tsx
Normal file
@@ -0,0 +1,102 @@
|
||||
import _ from 'lodash';
|
||||
import React, { useEffect } from 'react';
|
||||
import { useAsyncFn } from 'react-use';
|
||||
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { InlineField, Select } from '@grafana/ui';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
import { MetricPicker } from '../../../components';
|
||||
import { getVariableOptions } from './utils';
|
||||
import { ZabbixDatasource } from '../../datasource';
|
||||
import { ZabbixMetricsQuery } from '../../types';
|
||||
|
||||
// SLA properties selectable in the IT Services editor; values match the
// property names returned by the data source's SLA query.
const slaPropertyList: Array<SelectableValue<string>> = [
  { label: 'Status', value: 'status' },
  { label: 'SLA', value: 'sla' },
  { label: 'OK time', value: 'okTime' },
  { label: 'Problem time', value: 'problemTime' },
  { label: 'Down time', value: 'downtimeTime' },
];

// Aggregation intervals for SLA values ('none' = single value, 'auto' = panel interval).
const slaIntervals: Array<SelectableValue<string>> = [
  { label: 'No interval', value: 'none' },
  { label: 'Auto', value: 'auto' },
  { label: '1 hour', value: '1h' },
  { label: '12 hours', value: '12h' },
  { label: '24 hours', value: '1d' },
  { label: '1 week', value: '1w' },
  { label: '1 month', value: '1M' },
];

/** Props for the IT Services query sub-editor. */
export interface Props {
  query: ZabbixMetricsQuery;
  datasource: ZabbixDatasource;
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
export const ITServicesQueryEditor = ({ query, datasource, onChange }: Props) => {
|
||||
const loadITServiceOptions = async () => {
|
||||
const services = await datasource.zabbix.getITService();
|
||||
const options = services?.map((s) => ({
|
||||
value: s.name,
|
||||
label: s.name,
|
||||
}));
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: itServicesLoading, value: itServicesOptions }, fetchITServices] = useAsyncFn(async () => {
|
||||
const options = await loadITServiceOptions();
|
||||
return options;
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
fetchITServices();
|
||||
}, []);
|
||||
|
||||
const onPropChange = (prop: string) => {
|
||||
return (option: SelectableValue) => {
|
||||
if (option.value) {
|
||||
onChange({ ...query, [prop]: option.value });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const onITServiceChange = (value: string) => {
|
||||
if (value !== null) {
|
||||
onChange({ ...query, itServiceFilter: value });
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<QueryEditorRow>
|
||||
<InlineField label="IT Service" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.itServiceFilter}
|
||||
options={itServicesOptions}
|
||||
isLoading={itServicesLoading}
|
||||
onChange={onITServiceChange}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Property" labelWidth={12}>
|
||||
<Select
|
||||
isSearchable={false}
|
||||
width={24}
|
||||
value={query.slaProperty}
|
||||
options={slaPropertyList}
|
||||
onChange={onPropChange('slaProperty')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Interval" labelWidth={12}>
|
||||
<Select
|
||||
isSearchable={false}
|
||||
width={24}
|
||||
value={query.slaInterval}
|
||||
options={slaIntervals}
|
||||
onChange={onPropChange('slaInterval')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
);
|
||||
};
|
||||
26
src/datasource/components/QueryEditor/ItemIdQueryEditor.tsx
Normal file
26
src/datasource/components/QueryEditor/ItemIdQueryEditor.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import React, { FormEvent } from 'react';
|
||||
import { InlineField, Input } from '@grafana/ui';
|
||||
import { ZabbixMetricsQuery } from '../../types';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
|
||||
/** Props for the "Item Id" query sub-editor. */
export interface Props {
  query: ZabbixMetricsQuery;
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
export const ItemIdQueryEditor = ({ query, onChange }: Props) => {
|
||||
const onItemIdsChange = (v: FormEvent<HTMLInputElement>) => {
|
||||
const newValue = v?.currentTarget?.value;
|
||||
if (newValue !== null) {
|
||||
onChange({ ...query, itemids: newValue });
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Item Ids" labelWidth={12}>
|
||||
<Input width={24} defaultValue={query.itemids} onBlur={onItemIdsChange} />
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
);
|
||||
};
|
||||
174
src/datasource/components/QueryEditor/MetricsQueryEditor.tsx
Normal file
174
src/datasource/components/QueryEditor/MetricsQueryEditor.tsx
Normal file
@@ -0,0 +1,174 @@
|
||||
import _ from 'lodash';
|
||||
import React, { useEffect } from 'react';
|
||||
import { useAsyncFn } from 'react-use';
|
||||
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { InlineField } from '@grafana/ui';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
import { MetricPicker } from '../../../components';
|
||||
import { getVariableOptions } from './utils';
|
||||
import { ZabbixDatasource } from '../../datasource';
|
||||
import { ZabbixMetricsQuery } from '../../types';
|
||||
|
||||
/** Props for the "Metrics" query sub-editor. */
export interface Props {
  query: ZabbixMetricsQuery;
  datasource: ZabbixDatasource;
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
export const MetricsQueryEditor = ({ query, datasource, onChange }: Props) => {
|
||||
const loadGroupOptions = async () => {
|
||||
const groups = await datasource.zabbix.getAllGroups();
|
||||
const options = groups?.map((group) => ({
|
||||
value: group.name,
|
||||
label: group.name,
|
||||
}));
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: groupsLoading, value: groupsOptions }, fetchGroups] = useAsyncFn(async () => {
|
||||
const options = await loadGroupOptions();
|
||||
return options;
|
||||
}, []);
|
||||
|
||||
const loadHostOptions = async (group: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hosts = await datasource.zabbix.getAllHosts(groupFilter);
|
||||
let options: Array<SelectableValue<string>> = hosts?.map((host) => ({
|
||||
value: host.name,
|
||||
label: host.name,
|
||||
}));
|
||||
options = _.uniqBy(options, (o) => o.value);
|
||||
options.unshift({ value: '/.*/' });
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: hostsLoading, value: hostOptions }, fetchHosts] = useAsyncFn(async () => {
|
||||
const options = await loadHostOptions(query.group.filter);
|
||||
return options;
|
||||
}, [query.group.filter]);
|
||||
|
||||
const loadAppOptions = async (group: string, host: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hostFilter = datasource.replaceTemplateVars(host);
|
||||
const apps = await datasource.zabbix.getAllApps(groupFilter, hostFilter);
|
||||
let options: Array<SelectableValue<string>> = apps?.map((app) => ({
|
||||
value: app.name,
|
||||
label: app.name,
|
||||
}));
|
||||
options = _.uniqBy(options, (o) => o.value);
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: appsLoading, value: appOptions }, fetchApps] = useAsyncFn(async () => {
|
||||
const options = await loadAppOptions(query.group.filter, query.host.filter);
|
||||
return options;
|
||||
}, [query.group.filter, query.host.filter]);
|
||||
|
||||
const loadItemOptions = async (group: string, host: string, app: string, itemTag: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hostFilter = datasource.replaceTemplateVars(host);
|
||||
const appFilter = datasource.replaceTemplateVars(app);
|
||||
const tagFilter = datasource.replaceTemplateVars(itemTag);
|
||||
const options = {
|
||||
itemtype: 'num',
|
||||
showDisabledItems: query.options.showDisabledItems,
|
||||
};
|
||||
const items = await datasource.zabbix.getAllItems(groupFilter, hostFilter, appFilter, tagFilter, options);
|
||||
let itemOptions: Array<SelectableValue<string>> = items?.map((item) => ({
|
||||
value: item.name,
|
||||
label: item.name,
|
||||
}));
|
||||
itemOptions = _.uniqBy(itemOptions, (o) => o.value);
|
||||
itemOptions.unshift(...getVariableOptions());
|
||||
return itemOptions;
|
||||
};
|
||||
|
||||
const [{ loading: itemsLoading, value: itemOptions }, fetchItems] = useAsyncFn(async () => {
|
||||
const options = await loadItemOptions(
|
||||
query.group.filter,
|
||||
query.host.filter,
|
||||
query.application.filter,
|
||||
query.itemTag.filter
|
||||
);
|
||||
return options;
|
||||
}, [query.group.filter, query.host.filter, query.application.filter, query.itemTag.filter]);
|
||||
|
||||
// Update suggestions on every metric change
|
||||
const groupFilter = datasource.replaceTemplateVars(query.group?.filter);
|
||||
const hostFilter = datasource.replaceTemplateVars(query.host?.filter);
|
||||
const appFilter = datasource.replaceTemplateVars(query.application?.filter);
|
||||
const tagFilter = datasource.replaceTemplateVars(query.itemTag?.filter);
|
||||
|
||||
useEffect(() => {
|
||||
fetchGroups();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
fetchHosts();
|
||||
}, [groupFilter]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchApps();
|
||||
}, [groupFilter, hostFilter]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchItems();
|
||||
}, [groupFilter, hostFilter, appFilter, tagFilter]);
|
||||
|
||||
const onFilterChange = (prop: string) => {
|
||||
return (value: string) => {
|
||||
if (value !== null) {
|
||||
onChange({ ...query, [prop]: { filter: value } });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Group" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.group.filter}
|
||||
options={groupsOptions}
|
||||
isLoading={groupsLoading}
|
||||
onChange={onFilterChange('group')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Host" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.host.filter}
|
||||
options={hostOptions}
|
||||
isLoading={hostsLoading}
|
||||
onChange={onFilterChange('host')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Application" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.application.filter}
|
||||
options={appOptions}
|
||||
isLoading={appsLoading}
|
||||
onChange={onFilterChange('application')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Item" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.item.filter}
|
||||
options={itemOptions}
|
||||
isLoading={itemsLoading}
|
||||
onChange={onFilterChange('item')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
</>
|
||||
);
|
||||
};
|
||||
232
src/datasource/components/QueryEditor/ProblemsQueryEditor.tsx
Normal file
232
src/datasource/components/QueryEditor/ProblemsQueryEditor.tsx
Normal file
@@ -0,0 +1,232 @@
|
||||
import _ from 'lodash';
|
||||
import React, { useEffect, FormEvent } from 'react';
|
||||
import { useAsyncFn } from 'react-use';
|
||||
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { InlineField, Input, Select } from '@grafana/ui';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
import { MetricPicker } from '../../../components';
|
||||
import { getVariableOptions } from './utils';
|
||||
import { ZabbixDatasource } from '../../datasource';
|
||||
import { ZabbixMetricsQuery } from '../../types';
|
||||
|
||||
// "Show" modes for the Problems editor: current problems, recently resolved, or full history.
const showProblemsOptions: Array<SelectableValue<string>> = [
  { label: 'Problems', value: 'problems' },
  { label: 'Recent problems', value: 'recent' },
  { label: 'History', value: 'history' },
];

// Zabbix trigger severities (0–5), lowest to highest.
const severityOptions: Array<SelectableValue<number>> = [
  { value: 0, label: 'Not classified' },
  { value: 1, label: 'Information' },
  { value: 2, label: 'Warning' },
  { value: 3, label: 'Average' },
  { value: 4, label: 'High' },
  { value: 5, label: 'Disaster' },
];

/** Props for the "Problems" query sub-editor. */
export interface Props {
  query: ZabbixMetricsQuery;
  datasource: ZabbixDatasource;
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
export const ProblemsQueryEditor = ({ query, datasource, onChange }: Props) => {
|
||||
const loadGroupOptions = async () => {
|
||||
const groups = await datasource.zabbix.getAllGroups();
|
||||
const options = groups?.map((group) => ({
|
||||
value: group.name,
|
||||
label: group.name,
|
||||
}));
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: groupsLoading, value: groupsOptions }, fetchGroups] = useAsyncFn(async () => {
|
||||
const options = await loadGroupOptions();
|
||||
return options;
|
||||
}, []);
|
||||
|
||||
const loadHostOptions = async (group: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hosts = await datasource.zabbix.getAllHosts(groupFilter);
|
||||
let options: Array<SelectableValue<string>> = hosts?.map((host) => ({
|
||||
value: host.name,
|
||||
label: host.name,
|
||||
}));
|
||||
options = _.uniqBy(options, (o) => o.value);
|
||||
options.unshift({ value: '/.*/' });
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: hostsLoading, value: hostOptions }, fetchHosts] = useAsyncFn(async () => {
|
||||
const options = await loadHostOptions(query.group.filter);
|
||||
return options;
|
||||
}, [query.group.filter]);
|
||||
|
||||
const loadAppOptions = async (group: string, host: string) => {
|
||||
const groupFilter = datasource.replaceTemplateVars(group);
|
||||
const hostFilter = datasource.replaceTemplateVars(host);
|
||||
const apps = await datasource.zabbix.getAllApps(groupFilter, hostFilter);
|
||||
let options: Array<SelectableValue<string>> = apps?.map((app) => ({
|
||||
value: app.name,
|
||||
label: app.name,
|
||||
}));
|
||||
options = _.uniqBy(options, (o) => o.value);
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: appsLoading, value: appOptions }, fetchApps] = useAsyncFn(async () => {
|
||||
const options = await loadAppOptions(query.group.filter, query.host.filter);
|
||||
return options;
|
||||
}, [query.group.filter, query.host.filter]);
|
||||
|
||||
const loadProxyOptions = async () => {
|
||||
const proxies = await datasource.zabbix.getProxies();
|
||||
const options = proxies?.map((proxy) => ({
|
||||
value: proxy.host,
|
||||
label: proxy.host,
|
||||
}));
|
||||
options.unshift(...getVariableOptions());
|
||||
return options;
|
||||
};
|
||||
|
||||
const [{ loading: proxiesLoading, value: proxiesOptions }, fetchProxies] = useAsyncFn(async () => {
|
||||
const options = await loadProxyOptions();
|
||||
return options;
|
||||
}, []);
|
||||
|
||||
// Update suggestions on every metric change
|
||||
const groupFilter = datasource.replaceTemplateVars(query.group?.filter);
|
||||
const hostFilter = datasource.replaceTemplateVars(query.host?.filter);
|
||||
|
||||
useEffect(() => {
|
||||
fetchGroups();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
fetchHosts();
|
||||
}, [groupFilter]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchApps();
|
||||
}, [groupFilter, hostFilter]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchProxies();
|
||||
}, []);
|
||||
|
||||
const onTextFilterChange = (prop: string) => {
|
||||
return (v: FormEvent<HTMLInputElement>) => {
|
||||
const newValue = v?.currentTarget?.value;
|
||||
if (newValue !== null) {
|
||||
onChange({ ...query, [prop]: { filter: newValue } });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const onFilterChange = (prop: string) => {
|
||||
return (value: string) => {
|
||||
if (value !== null) {
|
||||
onChange({ ...query, [prop]: { filter: value } });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const onPropChange = (prop: string) => {
|
||||
return (option: SelectableValue) => {
|
||||
if (option.value !== null) {
|
||||
onChange({ ...query, [prop]: option.value });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const onMinSeverityChange = (option: SelectableValue) => {
|
||||
if (option.value !== null) {
|
||||
onChange({ ...query, options: { ...query.options, minSeverity: option.value } });
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Group" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.group?.filter}
|
||||
options={groupsOptions}
|
||||
isLoading={groupsLoading}
|
||||
onChange={onFilterChange('group')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Host" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.host?.filter}
|
||||
options={hostOptions}
|
||||
isLoading={hostsLoading}
|
||||
onChange={onFilterChange('host')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Proxy" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.proxy?.filter}
|
||||
options={proxiesOptions}
|
||||
isLoading={proxiesLoading}
|
||||
onChange={onFilterChange('proxy')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Application" labelWidth={12}>
|
||||
<MetricPicker
|
||||
width={24}
|
||||
value={query.application?.filter}
|
||||
options={appOptions}
|
||||
isLoading={appsLoading}
|
||||
onChange={onFilterChange('application')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Problem" labelWidth={12}>
|
||||
<Input
|
||||
width={24}
|
||||
defaultValue={query.trigger?.filter}
|
||||
placeholder="Problem name"
|
||||
onBlur={onTextFilterChange('trigger')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Tags" labelWidth={12}>
|
||||
<Input
|
||||
width={24}
|
||||
defaultValue={query.tags?.filter}
|
||||
placeholder="tag1:value1, tag2:value2"
|
||||
onBlur={onTextFilterChange('tags')}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
<QueryEditorRow>
|
||||
<InlineField label="Show" labelWidth={12}>
|
||||
<Select
|
||||
isSearchable={false}
|
||||
width={24}
|
||||
value={query.showProblems}
|
||||
options={showProblemsOptions}
|
||||
onChange={onPropChange('showProblems')}
|
||||
/>
|
||||
</InlineField>
|
||||
<InlineField label="Min severity" labelWidth={12}>
|
||||
<Select
|
||||
isSearchable={false}
|
||||
width={24}
|
||||
value={query.options?.minSeverity}
|
||||
options={severityOptions}
|
||||
onChange={onMinSeverityChange}
|
||||
/>
|
||||
</InlineField>
|
||||
</QueryEditorRow>
|
||||
</>
|
||||
);
|
||||
};
|
||||
13
src/datasource/components/QueryEditor/QueryEditorRow.tsx
Normal file
13
src/datasource/components/QueryEditor/QueryEditorRow.tsx
Normal file
@@ -0,0 +1,13 @@
|
||||
import React from 'react';
|
||||
import { InlineFieldRow } from '@grafana/ui';
|
||||
|
||||
export const QueryEditorRow = ({ children }: React.PropsWithChildren<{}>) => {
|
||||
return (
|
||||
<InlineFieldRow>
|
||||
{children}
|
||||
<div className="gf-form gf-form--grow">
|
||||
<div className="gf-form-label gf-form-label--grow" />
|
||||
</div>
|
||||
</InlineFieldRow>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,82 @@
|
||||
import React from 'react';
|
||||
import { swap } from '../../utils';
|
||||
import { createFuncInstance } from '../../metricFunctions';
|
||||
import { FuncDef, MetricFunc, ZabbixMetricsQuery } from '../../types';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
import { InlineFormLabel } from '@grafana/ui';
|
||||
import { ZabbixFunctionEditor } from '../FunctionEditor/ZabbixFunctionEditor';
|
||||
import { AddZabbixFunction } from '../FunctionEditor/AddZabbixFunction';
|
||||
|
||||
/** Props for the query functions editor. */
export interface Props {
  query: ZabbixMetricsQuery;
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
/**
 * Editor row for the list of post-processing functions applied to a query.
 * Functions can be edited, reordered, removed, and added; every change calls
 * onChange with an updated query.
 */
export const QueryFunctionsEditor = ({ query, onChange }: Props) => {
  // NOTE(review): mutates `func` and the functions array in place before the
  // identity lookup — the findIndex relies on `func` being the same object
  // reference that lives in query.functions.
  const onFuncParamChange = (func: MetricFunc, index: number, value: string) => {
    func.params[index] = value;
    const funcIndex = query.functions.findIndex((f) => f === func);
    const functions = query.functions;
    functions[funcIndex] = func;
    onChange({ ...query, functions });
  };

  // Swap the function with its left neighbor.
  const onMoveFuncLeft = (func: MetricFunc) => {
    const index = query.functions.indexOf(func);
    const functions = swap(query.functions, index, index - 1);
    onChange({ ...query, functions });
  };

  // Swap the function with its right neighbor.
  const onMoveFuncRight = (func: MetricFunc) => {
    const index = query.functions.indexOf(func);
    const functions = swap(query.functions, index, index + 1);
    onChange({ ...query, functions });
  };

  const onRemoveFunc = (func: MetricFunc) => {
    const functions = query.functions?.filter((f) => f !== func);
    onChange({ ...query, functions });
  };

  // Append a new function instance; Alias-category functions are always kept last.
  const onFuncAdd = (def: FuncDef) => {
    const newFunc = createFuncInstance(def);
    newFunc.added = true;
    let functions = query.functions.concat(newFunc);
    functions = moveAliasFuncLast(functions);

    // if ((newFunc.params.length && newFunc.added) || newFunc.def.params.length === 0) {
    // }
    onChange({ ...query, functions });
  };

  return (
    <QueryEditorRow>
      <InlineFormLabel width={6}>Functions</InlineFormLabel>
      {query.functions?.map((f, i) => {
        return (
          <ZabbixFunctionEditor
            func={f}
            key={i}
            onParamChange={onFuncParamChange}
            onMoveLeft={onMoveFuncLeft}
            onMoveRight={onMoveFuncRight}
            onRemove={onRemoveFunc}
          />
        );
      })}
      <AddZabbixFunction onFuncAdd={onFuncAdd} />
    </QueryEditorRow>
  );
};
|
||||
|
||||
function moveAliasFuncLast(functions: MetricFunc[]) {
|
||||
const aliasFuncIndex = functions.findIndex((func) => func.def.category === 'Alias');
|
||||
|
||||
console.log(aliasFuncIndex);
|
||||
if (aliasFuncIndex >= 0) {
|
||||
const aliasFunc = functions[aliasFuncIndex];
|
||||
functions.splice(aliasFuncIndex, 1);
|
||||
functions.push(aliasFunc);
|
||||
}
|
||||
return functions;
|
||||
}
|
||||
232
src/datasource/components/QueryEditor/QueryOptionsEditor.tsx
Normal file
232
src/datasource/components/QueryEditor/QueryOptionsEditor.tsx
Normal file
@@ -0,0 +1,232 @@
|
||||
import { css } from '@emotion/css';
|
||||
import React, { useState, FormEvent } from 'react';
|
||||
import { GrafanaTheme2, SelectableValue } from '@grafana/data';
|
||||
import {
|
||||
HorizontalGroup,
|
||||
Icon,
|
||||
InlineField,
|
||||
InlineFieldRow,
|
||||
InlineSwitch,
|
||||
Input,
|
||||
Select,
|
||||
useStyles2,
|
||||
} from '@grafana/ui';
|
||||
import * as c from '../../constants';
|
||||
import { ZabbixQueryOptions } from '../../types';
|
||||
|
||||
// Acknowledgement filter choices for problems/triggers queries.
// Values map to the Zabbix API: 0 = unacknowledged, 1 = acknowledged,
// 2 = both (shown first as the default "all triggers" choice).
const ackOptions: Array<SelectableValue<number>> = [
  { label: 'all triggers', value: 2 },
  { label: 'unacknowledged', value: 0 },
  { label: 'acknowledged', value: 1 },
];

// Sort-order choices for the problems list.
const sortOptions: Array<SelectableValue<string>> = [
  { label: 'Default', value: 'default' },
  { label: 'Last change', value: 'lastchange' },
  { label: 'Severity', value: 'severity' },
];

interface Props {
  // Current editor mode (one of the MODE_* constants from ../../constants).
  queryType: string;
  // Current option values to render and update.
  queryOptions: ZabbixQueryOptions;
  // Called with the full new options object on every change.
  onChange: (options: ZabbixQueryOptions) => void;
}
|
||||
|
||||
/**
 * Collapsible "Options" section of the query editor. When closed it shows a
 * one-line summary of the enabled options; when expanded it renders the
 * option controls appropriate for the current query type.
 */
export const QueryOptionsEditor = ({ queryType, queryOptions, onChange }: Props) => {
  // Whether the editor section is expanded.
  const [isOpen, setIsOpen] = useState(false);
  const styles = useStyles2(getStyles);

  // Commits the "limit" option when the input loses focus.
  // NOTE(review): Number() never returns null, so this guard never rejects;
  // non-numeric input would commit NaN — confirm whether an isNaN check was
  // intended here.
  const onLimitChange = (v: FormEvent<HTMLInputElement>) => {
    const newValue = Number(v?.currentTarget?.value);
    if (newValue !== null) {
      onChange({ ...queryOptions, limit: newValue });
    }
  };

  // Returns a Select change handler that writes the chosen value to the
  // named option property.
  const onPropChange = (prop: string) => {
    return (option: SelectableValue) => {
      if (option.value !== null) {
        onChange({ ...queryOptions, [prop]: option.value });
      }
    };
  };

  // Header row: expand/collapse chevron, "Options" label, inline summary.
  const renderClosed = () => {
    return (
      <>
        <HorizontalGroup>
          {!isOpen && <Icon name="angle-right" />}
          {isOpen && <Icon name="angle-down" />}
          <span className={styles.label}>Options</span>
          <div className={styles.options}>{renderOptions()}</div>
        </HorizontalGroup>
      </>
    );
  };

  // Builds "key = value" summary chips for options currently set.
  // NOTE(review): `value === true` makes the remaining comparisons dead
  // code, so only boolean flags set to true appear in the summary — confirm
  // whether non-boolean options (limit, acknowledged, ...) were meant to be
  // shown as well.
  const renderOptions = () => {
    const elements: JSX.Element[] = [];
    for (const key in queryOptions) {
      if (queryOptions.hasOwnProperty(key)) {
        const value = queryOptions[key];
        if (value === true && value !== '' && value !== null && value !== undefined) {
          elements.push(<span className={styles.optionContainer} key={key}>{`${key} = ${value}`}</span>);
        }
      }
    }
    return elements;
  };

  // Expanded editor body: picks the option controls for the current mode.
  // Metrics, item-ID and IT-services modes share the same options.
  const renderEditor = () => {
    return (
      <div className={styles.editorContainer}>
        {queryType === c.MODE_METRICS && renderMetricOptions()}
        {queryType === c.MODE_ITEMID && renderMetricOptions()}
        {queryType === c.MODE_ITSERVICE && renderMetricOptions()}
        {queryType === c.MODE_TEXT && renderTextMetricsOptions()}
        {queryType === c.MODE_PROBLEMS && renderProblemsOptions()}
        {queryType === c.MODE_TRIGGERS && renderTriggersOptions()}
      </div>
    );
  };

  // Options for numeric metric queries: three boolean toggles.
  const renderMetricOptions = () => {
    return (
      <>
        <InlineField label="Show disabled items" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.showDisabledItems}
            onChange={() => onChange({ ...queryOptions, showDisabledItems: !queryOptions.showDisabledItems })}
          />
        </InlineField>
        <InlineField label="Use Zabbix value mapping" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.useZabbixValueMapping}
            onChange={() => onChange({ ...queryOptions, useZabbixValueMapping: !queryOptions.useZabbixValueMapping })}
          />
        </InlineField>
        <InlineField label="Disable data alignment" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.disableDataAlignment}
            onChange={() => onChange({ ...queryOptions, disableDataAlignment: !queryOptions.disableDataAlignment })}
          />
        </InlineField>
      </>
    );
  };

  // Options for text metric queries: only the disabled-items toggle.
  const renderTextMetricsOptions = () => {
    return (
      <>
        <InlineField label="Show disabled items" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.showDisabledItems}
            onChange={() => onChange({ ...queryOptions, showDisabledItems: !queryOptions.showDisabledItems })}
          />
        </InlineField>
      </>
    );
  };

  // Options for problems queries: ack filter, sort order, three toggles and
  // a result limit (committed on blur).
  const renderProblemsOptions = () => {
    return (
      <>
        <InlineField label="Acknowledged" labelWidth={24}>
          <Select
            isSearchable={false}
            width={24}
            value={queryOptions.acknowledged}
            options={ackOptions}
            onChange={onPropChange('acknowledged')}
          />
        </InlineField>
        <InlineField label="Sort by" labelWidth={24}>
          <Select
            isSearchable={false}
            width={24}
            value={queryOptions.sortProblems}
            options={sortOptions}
            onChange={onPropChange('sortProblems')}
          />
        </InlineField>
        <InlineField label="Use time range" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.useTimeRange}
            onChange={() => onChange({ ...queryOptions, useTimeRange: !queryOptions.useTimeRange })}
          />
        </InlineField>
        <InlineField label="Hosts in maintenance" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.hostsInMaintenance}
            onChange={() => onChange({ ...queryOptions, hostsInMaintenance: !queryOptions.hostsInMaintenance })}
          />
        </InlineField>
        <InlineField label="Host proxy" labelWidth={24}>
          <InlineSwitch
            value={queryOptions.hostProxy}
            onChange={() => onChange({ ...queryOptions, hostProxy: !queryOptions.hostProxy })}
          />
        </InlineField>
        <InlineField label="Limit" labelWidth={24}>
          <Input width={12} type="number" defaultValue={queryOptions.limit} onBlur={onLimitChange} />
        </InlineField>
      </>
    );
  };

  // Options for triggers queries: acknowledgement filter only.
  const renderTriggersOptions = () => {
    return (
      <>
        <InlineField label="Acknowledged" labelWidth={24}>
          <Select
            isSearchable={false}
            width={24}
            value={queryOptions.acknowledged}
            options={ackOptions}
            onChange={onPropChange('acknowledged')}
          />
        </InlineField>
      </>
    );
  };

  return (
    <>
      <InlineFieldRow>
        {/* Clicking anywhere on the header toggles the editor open/closed. */}
        <div className={styles.container} onClick={() => setIsOpen(!isOpen)}>
          {renderClosed()}
        </div>
      </InlineFieldRow>
      <InlineFieldRow>{isOpen && renderEditor()}</InlineFieldRow>
    </>
  );
};
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({
|
||||
container: css({
|
||||
backgroundColor: theme.colors.background.secondary,
|
||||
borderRadius: theme.shape.borderRadius(),
|
||||
marginRight: theme.spacing(0.5),
|
||||
marginBottom: theme.spacing(0.5),
|
||||
padding: `0 ${theme.spacing(1)}`,
|
||||
height: `${theme.v1.spacing.formInputHeight}px`,
|
||||
width: `100%`,
|
||||
}),
|
||||
label: css({
|
||||
color: theme.colors.info.text,
|
||||
fontWeight: theme.typography.fontWeightMedium,
|
||||
cursor: 'pointer',
|
||||
}),
|
||||
options: css({
|
||||
color: theme.colors.text.disabled,
|
||||
fontSize: theme.typography.bodySmall.fontSize,
|
||||
}),
|
||||
optionContainer: css`
|
||||
margin-right: ${theme.spacing(2)};
|
||||
`,
|
||||
editorContainer: css`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin-left: ${theme.spacing(4)};
|
||||
`,
|
||||
});
|
||||
192
src/datasource/components/QueryEditor/TextMetricsQueryEditor.tsx
Normal file
192
src/datasource/components/QueryEditor/TextMetricsQueryEditor.tsx
Normal file
@@ -0,0 +1,192 @@
|
||||
import _ from 'lodash';
|
||||
import React, { useEffect, FormEvent } from 'react';
|
||||
import { useAsyncFn } from 'react-use';
|
||||
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { InlineField, InlineSwitch, Input } from '@grafana/ui';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
import { MetricPicker } from '../../../components';
|
||||
import { getVariableOptions } from './utils';
|
||||
import { ZabbixDatasource } from '../../datasource';
|
||||
import { ZabbixMetricsQuery } from '../../types';
|
||||
|
||||
export interface Props {
  // Current query model being edited.
  query: ZabbixMetricsQuery;
  // Datasource instance used to fetch groups/hosts/apps/items suggestions.
  datasource: ZabbixDatasource;
  // Called with the full new query object on every change.
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
/**
 * Query editor for text metrics. Renders Group/Host/Application/Item metric
 * pickers whose option lists are fetched from Zabbix and refreshed whenever
 * an upstream filter changes, plus a regex text filter and a capture-groups
 * toggle.
 */
export const TextMetricsQueryEditor = ({ query, datasource, onChange }: Props) => {
  // Fetches all host groups and prepends dashboard template variables.
  const loadGroupOptions = async () => {
    const groups = await datasource.zabbix.getAllGroups();
    const options = groups?.map((group) => ({
      value: group.name,
      label: group.name,
    }));
    options.unshift(...getVariableOptions());
    return options;
  };

  const [{ loading: groupsLoading, value: groupsOptions }, fetchGroups] = useAsyncFn(async () => {
    const options = await loadGroupOptions();
    return options;
  }, []);

  // Fetches hosts matching the (template-expanded) group filter. Prepends
  // the /.*/ match-all entry and template variables; de-duplicates by name.
  const loadHostOptions = async (group: string) => {
    const groupFilter = datasource.replaceTemplateVars(group);
    const hosts = await datasource.zabbix.getAllHosts(groupFilter);
    let options: Array<SelectableValue<string>> = hosts?.map((host) => ({
      value: host.name,
      label: host.name,
    }));
    options = _.uniqBy(options, (o) => o.value);
    options.unshift({ value: '/.*/' });
    options.unshift(...getVariableOptions());
    return options;
  };

  const [{ loading: hostsLoading, value: hostOptions }, fetchHosts] = useAsyncFn(async () => {
    const options = await loadHostOptions(query.group.filter);
    return options;
  }, [query.group.filter]);

  // Fetches applications for the current group/host filters.
  const loadAppOptions = async (group: string, host: string) => {
    const groupFilter = datasource.replaceTemplateVars(group);
    const hostFilter = datasource.replaceTemplateVars(host);
    const apps = await datasource.zabbix.getAllApps(groupFilter, hostFilter);
    let options: Array<SelectableValue<string>> = apps?.map((app) => ({
      value: app.name,
      label: app.name,
    }));
    options = _.uniqBy(options, (o) => o.value);
    options.unshift(...getVariableOptions());
    return options;
  };

  const [{ loading: appsLoading, value: appOptions }, fetchApps] = useAsyncFn(async () => {
    const options = await loadAppOptions(query.group.filter, query.host.filter);
    return options;
  }, [query.group.filter, query.host.filter]);

  // Fetches text-type items for the current group/host/app/tag filters,
  // honoring the show-disabled-items option.
  const loadItemOptions = async (group: string, host: string, app: string, itemTag: string) => {
    const groupFilter = datasource.replaceTemplateVars(group);
    const hostFilter = datasource.replaceTemplateVars(host);
    const appFilter = datasource.replaceTemplateVars(app);
    const tagFilter = datasource.replaceTemplateVars(itemTag);
    const options = {
      itemtype: 'text',
      showDisabledItems: query.options.showDisabledItems,
    };
    const items = await datasource.zabbix.getAllItems(groupFilter, hostFilter, appFilter, tagFilter, options);
    let itemOptions: Array<SelectableValue<string>> = items?.map((item) => ({
      value: item.name,
      label: item.name,
    }));
    itemOptions = _.uniqBy(itemOptions, (o) => o.value);
    itemOptions.unshift(...getVariableOptions());
    return itemOptions;
  };

  const [{ loading: itemsLoading, value: itemOptions }, fetchItems] = useAsyncFn(async () => {
    const options = await loadItemOptions(
      query.group.filter,
      query.host.filter,
      query.application.filter,
      query.itemTag.filter
    );
    return options;
  }, [query.group.filter, query.host.filter, query.application.filter, query.itemTag.filter]);

  // Update suggestions on every metric change. Template-expanded filters
  // are used as effect dependencies so variable changes also trigger a
  // refetch.
  const groupFilter = datasource.replaceTemplateVars(query.group?.filter);
  const hostFilter = datasource.replaceTemplateVars(query.host?.filter);
  const appFilter = datasource.replaceTemplateVars(query.application?.filter);
  const tagFilter = datasource.replaceTemplateVars(query.itemTag?.filter);

  useEffect(() => {
    fetchGroups();
  }, []);

  useEffect(() => {
    fetchHosts();
  }, [groupFilter]);

  useEffect(() => {
    fetchApps();
  }, [groupFilter, hostFilter]);

  useEffect(() => {
    fetchItems();
  }, [groupFilter, hostFilter, appFilter, tagFilter]);

  // Commits the text filter when the input loses focus.
  const onTextFilterChange = (v: FormEvent<HTMLInputElement>) => {
    const newValue = v?.currentTarget?.value;
    if (newValue !== null) {
      onChange({ ...query, textFilter: newValue });
    }
  };

  // Returns a handler that replaces the named filter (group/host/...) with
  // a fresh { filter: value } object.
  const onFilterChange = (prop: string) => {
    return (value: string) => {
      if (value !== null) {
        onChange({ ...query, [prop]: { filter: value } });
      }
    };
  };

  return (
    <>
      <QueryEditorRow>
        <InlineField label="Group" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.group.filter}
            options={groupsOptions}
            isLoading={groupsLoading}
            onChange={onFilterChange('group')}
          />
        </InlineField>
        <InlineField label="Host" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.host.filter}
            options={hostOptions}
            isLoading={hostsLoading}
            onChange={onFilterChange('host')}
          />
        </InlineField>
      </QueryEditorRow>
      <QueryEditorRow>
        <InlineField label="Application" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.application.filter}
            options={appOptions}
            isLoading={appsLoading}
            onChange={onFilterChange('application')}
          />
        </InlineField>
        <InlineField label="Item" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.item.filter}
            options={itemOptions}
            isLoading={itemsLoading}
            onChange={onFilterChange('item')}
          />
        </InlineField>
      </QueryEditorRow>
      <QueryEditorRow>
        <InlineField label="Text filter" labelWidth={12}>
          <Input width={24} defaultValue={query.textFilter} onBlur={onTextFilterChange} />
        </InlineField>
        <InlineField label="Use capture groups" labelWidth={16}>
          <InlineSwitch
            value={query.useCaptureGroups}
            onChange={() => onChange({ ...query, useCaptureGroups: !query.useCaptureGroups })}
          />
        </InlineField>
      </QueryEditorRow>
    </>
  );
};
|
||||
160
src/datasource/components/QueryEditor/TriggersQueryEditor.tsx
Normal file
160
src/datasource/components/QueryEditor/TriggersQueryEditor.tsx
Normal file
@@ -0,0 +1,160 @@
|
||||
import _ from 'lodash';
|
||||
import React, { useEffect } from 'react';
|
||||
import { useAsyncFn } from 'react-use';
|
||||
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { InlineField, InlineSwitch, Select } from '@grafana/ui';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
import { MetricPicker } from '../../../components';
|
||||
import { getVariableOptions } from './utils';
|
||||
import { ZabbixDatasource } from '../../datasource';
|
||||
import { ZabbixMetricsQuery } from '../../types';
|
||||
|
||||
// Zabbix trigger severity levels, ordered from least to most severe.
const severityOptions: Array<SelectableValue<number>> = [
  { value: 0, label: 'Not classified' },
  { value: 1, label: 'Information' },
  { value: 2, label: 'Warning' },
  { value: 3, label: 'Average' },
  { value: 4, label: 'High' },
  { value: 5, label: 'Disaster' },
];

export interface Props {
  // Current query model being edited.
  query: ZabbixMetricsQuery;
  // Datasource instance used to fetch groups/hosts/apps suggestions.
  datasource: ZabbixDatasource;
  // Called with the full new query object on every change.
  onChange: (query: ZabbixMetricsQuery) => void;
}
|
||||
|
||||
/**
 * Query editor for trigger queries. Renders Group/Host/Application metric
 * pickers whose option lists are fetched from Zabbix and refreshed when an
 * upstream filter changes, plus a minimum-severity select and a count
 * toggle.
 */
export const TriggersQueryEditor = ({ query, datasource, onChange }: Props) => {
  // Fetches all host groups and prepends dashboard template variables.
  const loadGroupOptions = async () => {
    const groups = await datasource.zabbix.getAllGroups();
    const options = groups?.map((group) => ({
      value: group.name,
      label: group.name,
    }));
    options.unshift(...getVariableOptions());
    return options;
  };

  const [{ loading: groupsLoading, value: groupsOptions }, fetchGroups] = useAsyncFn(async () => {
    const options = await loadGroupOptions();
    return options;
  }, []);

  // Fetches hosts matching the (template-expanded) group filter. Prepends
  // the /.*/ match-all entry and template variables; de-duplicates by name.
  const loadHostOptions = async (group: string) => {
    const groupFilter = datasource.replaceTemplateVars(group);
    const hosts = await datasource.zabbix.getAllHosts(groupFilter);
    let options: Array<SelectableValue<string>> = hosts?.map((host) => ({
      value: host.name,
      label: host.name,
    }));
    options = _.uniqBy(options, (o) => o.value);
    options.unshift({ value: '/.*/' });
    options.unshift(...getVariableOptions());
    return options;
  };

  const [{ loading: hostsLoading, value: hostOptions }, fetchHosts] = useAsyncFn(async () => {
    const options = await loadHostOptions(query.group.filter);
    return options;
  }, [query.group.filter]);

  // Fetches applications for the current group/host filters.
  const loadAppOptions = async (group: string, host: string) => {
    const groupFilter = datasource.replaceTemplateVars(group);
    const hostFilter = datasource.replaceTemplateVars(host);
    const apps = await datasource.zabbix.getAllApps(groupFilter, hostFilter);
    let options: Array<SelectableValue<string>> = apps?.map((app) => ({
      value: app.name,
      label: app.name,
    }));
    options = _.uniqBy(options, (o) => o.value);
    options.unshift(...getVariableOptions());
    return options;
  };

  const [{ loading: appsLoading, value: appOptions }, fetchApps] = useAsyncFn(async () => {
    const options = await loadAppOptions(query.group.filter, query.host.filter);
    return options;
  }, [query.group.filter, query.host.filter]);

  // Update suggestions on every metric change. Template-expanded filters
  // are used as effect dependencies so variable changes also trigger a
  // refetch.
  const groupFilter = datasource.replaceTemplateVars(query.group?.filter);
  const hostFilter = datasource.replaceTemplateVars(query.host?.filter);

  useEffect(() => {
    fetchGroups();
  }, []);

  useEffect(() => {
    fetchHosts();
  }, [groupFilter]);

  useEffect(() => {
    fetchApps();
  }, [groupFilter, hostFilter]);

  // Returns a handler that replaces the named filter (group/host/...) with
  // a fresh { filter: value } object.
  const onFilterChange = (prop: string) => {
    return (value: string) => {
      if (value !== null) {
        onChange({ ...query, [prop]: { filter: value } });
      }
    };
  };

  // Writes the selected minimum severity into the query's options object.
  const onMinSeverityChange = (option: SelectableValue) => {
    if (option.value !== null) {
      onChange({ ...query, options: { ...query.options, minSeverity: option.value } });
    }
  };

  return (
    <>
      <QueryEditorRow>
        <InlineField label="Group" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.group?.filter}
            options={groupsOptions}
            isLoading={groupsLoading}
            onChange={onFilterChange('group')}
          />
        </InlineField>
        <InlineField label="Host" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.host?.filter}
            options={hostOptions}
            isLoading={hostsLoading}
            onChange={onFilterChange('host')}
          />
        </InlineField>
      </QueryEditorRow>
      <QueryEditorRow>
        <InlineField label="Application" labelWidth={12}>
          <MetricPicker
            width={24}
            value={query.application?.filter}
            options={appOptions}
            isLoading={appsLoading}
            onChange={onFilterChange('application')}
          />
        </InlineField>
        <InlineField label="Min severity" labelWidth={12}>
          <Select
            isSearchable={false}
            width={24}
            value={query.triggers?.minSeverity}
            options={severityOptions}
            onChange={onMinSeverityChange}
          />
        </InlineField>
        <InlineField label="Count" labelWidth={12}>
          <InlineSwitch
            value={query.triggers?.count}
            onChange={() => onChange({ ...query, triggers: { ...query.triggers, count: !query.triggers?.count } })}
          />
        </InlineField>
      </QueryEditorRow>
    </>
  );
};
|
||||
13
src/datasource/components/QueryEditor/utils.ts
Normal file
13
src/datasource/components/QueryEditor/utils.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { getTemplateSrv } from '@grafana/runtime';
|
||||
|
||||
export const getVariableOptions = () => {
|
||||
const variables = getTemplateSrv()
|
||||
.getVariables()
|
||||
.filter((v) => {
|
||||
return v.type !== 'datasource' && v.type !== 'interval';
|
||||
});
|
||||
return variables?.map((v) => ({
|
||||
value: `$${v.name}`,
|
||||
label: `$${v.name}`,
|
||||
}));
|
||||
};
|
||||
176
src/datasource/components/VariableQueryEditor.tsx
Normal file
176
src/datasource/components/VariableQueryEditor.tsx
Normal file
@@ -0,0 +1,176 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import { parseLegacyVariableQuery } from '../utils';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { VariableQuery, VariableQueryData, VariableQueryProps, VariableQueryTypes } from '../types';
|
||||
import { ZabbixInput } from './ZabbixInput';
|
||||
import { InlineFormLabel, Input, Select } from '@grafana/ui';
|
||||
|
||||
/**
 * Editor for Zabbix template-variable queries. Supports the structured
 * VariableQuery model and migrates legacy string queries on construction
 * (the original string is kept read-only in `legacyQuery`). Field edits are
 * staged in component state and committed to Grafana on blur.
 */
export class ZabbixVariableQueryEditor extends PureComponent<VariableQueryProps, VariableQueryData> {
  // Available query types for the type selector.
  queryTypes: Array<SelectableValue<VariableQueryTypes>> = [
    { value: VariableQueryTypes.Group, label: 'Group' },
    { value: VariableQueryTypes.Host, label: 'Host' },
    { value: VariableQueryTypes.Application, label: 'Application' },
    { value: VariableQueryTypes.ItemTag, label: 'Item tag' },
    { value: VariableQueryTypes.Item, label: 'Item' },
    { value: VariableQueryTypes.ItemValues, label: 'Item values' },
  ];

  // Initial state for a brand-new variable query (match all groups).
  defaults: VariableQueryData = {
    selectedQueryType: { value: VariableQueryTypes.Group, label: 'Group' },
    queryType: VariableQueryTypes.Group,
    group: '/.*/',
    host: '',
    application: '',
    itemTag: '',
    item: '',
  };

  constructor(props: VariableQueryProps) {
    super(props);

    if (this.props.query && typeof this.props.query === 'string') {
      // Backward compatibility: parse the legacy string format and keep the
      // original string for read-only display.
      const query = parseLegacyVariableQuery(this.props.query);
      const selectedQueryType = this.getSelectedQueryType(query.queryType);
      this.state = {
        selectedQueryType,
        legacyQuery: this.props.query,
        ...query,
      };
    } else if (this.props.query) {
      // Structured query model: merge over the defaults.
      const query = this.props.query as VariableQuery;
      const selectedQueryType = this.getSelectedQueryType(query.queryType);
      this.state = {
        ...this.defaults,
        ...query,
        selectedQueryType,
      };
    } else {
      this.state = this.defaults;
    }
  }

  // Maps a queryType value back to its SelectableValue entry.
  // NOTE(review): returns undefined for an unknown queryType; render()
  // dereferences selectedQueryType.value without a guard — confirm inputs
  // are always one of the known types.
  getSelectedQueryType(queryType: VariableQueryTypes) {
    return this.queryTypes.find((q) => q.value === queryType);
  }

  // Stages an input edit for the named field in component state
  // (committed later by handleQueryChange on blur).
  handleQueryUpdate = (evt: React.ChangeEvent<HTMLInputElement>, prop: string) => {
    const value = evt.currentTarget.value;
    this.setState((prevState: VariableQueryData) => {
      const newQuery = {
        ...prevState,
      };
      newQuery[prop] = value;

      return {
        ...newQuery,
      };
    });
  };

  // Commits the current state to Grafana as the variable query.
  handleQueryChange = () => {
    const { queryType, group, host, application, itemTag, item } = this.state;
    const queryModel = { queryType, group, host, application, itemTag, item };
    this.props.onChange(queryModel, `Zabbix - ${queryType}`);
  };

  // Switches the query type and immediately commits with the new type
  // (uses selectedItem.value directly since setState is asynchronous).
  handleQueryTypeChange = (selectedItem: SelectableValue<VariableQueryTypes>) => {
    this.setState({
      ...this.state,
      selectedQueryType: selectedItem,
      queryType: selectedItem.value,
    });

    const { group, host, application, itemTag, item } = this.state;
    const queryType = selectedItem.value;
    const queryModel = { queryType, group, host, application, itemTag, item };
    this.props.onChange(queryModel, `Zabbix - ${queryType}`);
  };

  render() {
    const { selectedQueryType, legacyQuery, group, host, application, itemTag, item } = this.state;
    const { datasource } = this.props;
    // Zabbix 5.4+ replaced applications with item tags; switch the field
    // accordingly.
    const supportsItemTags = datasource?.zabbix?.isZabbix54OrHigher() || false;

    return (
      <>
        <div className="gf-form max-width-21">
          <InlineFormLabel width={10}>Query Type</InlineFormLabel>
          <Select
            width={11}
            value={selectedQueryType}
            options={this.queryTypes}
            onChange={this.handleQueryTypeChange}
          />
        </div>
        <div className="gf-form-inline">
          <div className="gf-form max-width-30">
            <InlineFormLabel width={10}>Group</InlineFormLabel>
            <ZabbixInput
              value={group}
              onChange={(evt) => this.handleQueryUpdate(evt, 'group')}
              onBlur={this.handleQueryChange}
            />
          </div>
          {selectedQueryType.value !== VariableQueryTypes.Group && (
            <div className="gf-form max-width-30">
              <InlineFormLabel width={10}>Host</InlineFormLabel>
              <ZabbixInput
                value={host}
                onChange={(evt) => this.handleQueryUpdate(evt, 'host')}
                onBlur={this.handleQueryChange}
              />
            </div>
          )}
        </div>
        {(selectedQueryType.value === VariableQueryTypes.Application ||
          selectedQueryType.value === VariableQueryTypes.ItemTag ||
          selectedQueryType.value === VariableQueryTypes.Item ||
          selectedQueryType.value === VariableQueryTypes.ItemValues) && (
          <div className="gf-form-inline">
            {supportsItemTags && (
              <div className="gf-form max-width-30">
                <InlineFormLabel width={10}>Item tag</InlineFormLabel>
                <ZabbixInput
                  value={itemTag}
                  onChange={(evt) => this.handleQueryUpdate(evt, 'itemTag')}
                  onBlur={this.handleQueryChange}
                />
              </div>
            )}
            {!supportsItemTags && (
              <div className="gf-form max-width-30">
                <InlineFormLabel width={10}>Application</InlineFormLabel>
                <ZabbixInput
                  value={application}
                  onChange={(evt) => this.handleQueryUpdate(evt, 'application')}
                  onBlur={this.handleQueryChange}
                />
              </div>
            )}
            {(selectedQueryType.value === VariableQueryTypes.Item ||
              selectedQueryType.value === VariableQueryTypes.ItemValues) && (
              <div className="gf-form max-width-30">
                <InlineFormLabel width={10}>Item</InlineFormLabel>
                <ZabbixInput
                  value={item}
                  onChange={(evt) => this.handleQueryUpdate(evt, 'item')}
                  onBlur={this.handleQueryChange}
                />
              </div>
            )}
          </div>
        )}

        {legacyQuery && (
          <div className="gf-form">
            <InlineFormLabel width={10} tooltip="Original query string, read-only">
              Legacy Query
            </InlineFormLabel>
            <Input value={legacyQuery} readOnly={true} />
          </div>
        )}
      </>
    );
  }
}
|
||||
60
src/datasource/components/ZabbixInput.tsx
Normal file
60
src/datasource/components/ZabbixInput.tsx
Normal file
@@ -0,0 +1,60 @@
|
||||
import React, { FC } from 'react';
|
||||
import { css } from '@emotion/css';
|
||||
import { EventsWithValidation, ValidationEvents, useStyles2 } from '@grafana/ui';
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { isRegex, variableRegex } from '../utils';
|
||||
|
||||
import * as grafanaUi from '@grafana/ui';
|
||||
// Use the legacy Input (supports validationEvents) when available, falling
// back to the current Input for newer @grafana/ui versions.
const Input = (grafanaUi as any).LegacyForms?.Input || (grafanaUi as any).Input;

// Matches values that start with a template-variable reference ($var, ...).
const variablePattern = RegExp(`^${variableRegex.source}`);
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({
|
||||
inputRegex: css`
|
||||
color: ${theme.colors.warning.main};
|
||||
`,
|
||||
inputVariable: css`
|
||||
color: ${theme.colors.action.focus};
|
||||
`,
|
||||
});
|
||||
|
||||
const zabbixInputValidationEvents: ValidationEvents = {
|
||||
[EventsWithValidation.onBlur]: [
|
||||
{
|
||||
rule: (value) => {
|
||||
if (!value) {
|
||||
return true;
|
||||
}
|
||||
if (value.length > 1 && value[0] === '/') {
|
||||
if (value[value.length - 1] !== '/') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
},
|
||||
errorMessage: 'Not a valid regex',
|
||||
},
|
||||
{
|
||||
rule: (value) => {
|
||||
if (value === '*') {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
},
|
||||
errorMessage: 'Wildcards not supported. Use /.*/ instead',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
/**
 * Text input that colorizes its value: variable references get the variable
 * color, everything else the regex color, and validates regex/wildcard
 * syntax on blur. `ref` and any incoming `validationEvents` are
 * intentionally dropped (the component always uses its own rules).
 */
export const ZabbixInput: FC<any> = ({ value, ref, validationEvents, ...restProps }) => {
  const styles = useStyles2(getStyles);

  // Default to the regex color; the isRegex branch below is redundant with
  // this default but kept for clarity of intent.
  let inputClass = styles.inputRegex;
  if (variablePattern.test(value as string)) {
    inputClass = styles.inputVariable;
  } else if (isRegex(value)) {
    inputClass = styles.inputRegex;
  }

  return <Input className={inputClass} value={value} validationEvents={zabbixInputValidationEvents} {...restProps} />;
};
|
||||
50
src/datasource/constants.ts
Normal file
50
src/datasource/constants.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
// Plugin IDs
export const ZABBIX_PROBLEMS_PANEL_ID = 'alexanderzobnin-zabbix-triggers-panel';
export const ZABBIX_DS_ID = 'alexanderzobnin-zabbix-datasource';

// Data point tuple indices: [value, timestamp]
export const DATAPOINT_VALUE = 0;
export const DATAPOINT_TS = 1;

// Editor modes (stored as strings in the query model)
export const MODE_METRICS = '0';
export const MODE_ITSERVICE = '1';
export const MODE_TEXT = '2';
export const MODE_ITEMID = '3';
export const MODE_TRIGGERS = '4';
export const MODE_PROBLEMS = '5';

// Triggers severity (Zabbix severity levels 0-5)
export const SEV_NOT_CLASSIFIED = 0;
export const SEV_INFORMATION = 1;
export const SEV_WARNING = 2;
export const SEV_AVERAGE = 3;
export const SEV_HIGH = 4;
export const SEV_DISASTER = 5;

// Trigger/event filter values passed to the Zabbix API
export const SHOW_ALL_TRIGGERS = [0, 1];
export const SHOW_ALL_EVENTS = [0, 1];
export const SHOW_OK_EVENTS = 1;

// Acknowledge action bit flags (combinable via bitwise OR)
export const ZBX_ACK_ACTION_NONE = 0;
export const ZBX_ACK_ACTION_CLOSE = 1;
export const ZBX_ACK_ACTION_ACK = 2;
export const ZBX_ACK_ACTION_ADD_MESSAGE = 4;
export const ZBX_ACK_ACTION_CHANGE_SEVERITY = 8;

// Severity values with display labels (val matches the SEV_* constants)
export const TRIGGER_SEVERITY = [
  { val: 0, text: 'Not classified' },
  { val: 1, text: 'Information' },
  { val: 2, text: 'Warning' },
  { val: 3, text: 'Average' },
  { val: 4, text: 'High' },
  { val: 5, text: 'Disaster' },
];

/** Minimum interval for SLA over time (1 hour) */
export const MIN_SLA_INTERVAL = 3600;

export const RANGE_VARIABLE_VALUE = 'range_series';

export const DEFAULT_ZABBIX_PROBLEMS_LIMIT = 1001;
||||
115
src/datasource/dataProcessor.ts
Normal file
115
src/datasource/dataProcessor.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import _ from 'lodash';
|
||||
import * as utils from './utils';
|
||||
import { getTemplateSrv } from '@grafana/runtime';
|
||||
import { DataFrame, FieldType, TIME_SERIES_VALUE_FIELD_NAME } from '@grafana/data';
|
||||
|
||||
// Applies a display alias to a data frame (mutates and returns the frame).
// For narrow frames (<= 2 fields, i.e. time + value) the alias goes on the
// value field's displayNameFromDS and the frame name; for wide frames it is
// applied to every non-time field. Template variables in the alias are
// expanded with the field's scoped vars when present.
// NOTE(review): this function reads scopedVars from config.custom while
// replaceAlias() below reads them from field.state — confirm which location
// is the canonical one.
function setAlias(alias: string, frame: DataFrame) {
  if (frame.fields?.length <= 2) {
    const valueField = frame.fields.find((f) => f.name === TIME_SERIES_VALUE_FIELD_NAME);
    if (valueField?.config?.custom?.scopedVars) {
      alias = getTemplateSrv().replace(alias, valueField?.config?.custom?.scopedVars);
    }
    if (valueField) {
      valueField.config.displayNameFromDS = alias;
    }
    frame.name = alias;
    return frame;
  }

  // Wide frame: alias every non-time field.
  for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
    const field = frame.fields[fieldIndex];
    if (field.type !== FieldType.time) {
      if (field?.config?.custom?.scopedVars) {
        alias = getTemplateSrv().replace(alias, field?.config?.custom?.scopedVars);
      }
      field.config.displayNameFromDS = alias;
    }
  }
  return frame;
}
|
||||
|
||||
function replaceAlias(regexp: string, newAlias: string, frame: DataFrame) {
|
||||
let pattern: string | RegExp;
|
||||
if (utils.isRegex(regexp)) {
|
||||
pattern = utils.buildRegex(regexp);
|
||||
} else {
|
||||
pattern = regexp;
|
||||
}
|
||||
|
||||
if (frame.fields?.length <= 2) {
|
||||
let alias = frame.name.replace(pattern, newAlias);
|
||||
const valueField = frame.fields.find((f) => f.name === TIME_SERIES_VALUE_FIELD_NAME);
|
||||
if (valueField?.state?.scopedVars) {
|
||||
alias = getTemplateSrv().replace(alias, valueField?.state?.scopedVars);
|
||||
}
|
||||
if (valueField) {
|
||||
valueField.config.displayNameFromDS = alias;
|
||||
}
|
||||
frame.name = alias;
|
||||
return frame;
|
||||
}
|
||||
|
||||
for (const field of frame.fields) {
|
||||
if (field.type !== FieldType.time) {
|
||||
let alias = field.config?.displayNameFromDS?.replace(pattern, newAlias);
|
||||
if (field?.state?.scopedVars && alias) {
|
||||
alias = getTemplateSrv().replace(alias, field?.state?.scopedVars);
|
||||
}
|
||||
field.name = alias || field.name;
|
||||
}
|
||||
}
|
||||
return frame;
|
||||
}
|
||||
|
||||
function setAliasByRegex(alias: string, frame: DataFrame) {
|
||||
if (frame.fields?.length <= 2) {
|
||||
const valueField = frame.fields.find((f) => f.name === TIME_SERIES_VALUE_FIELD_NAME);
|
||||
try {
|
||||
if (valueField) {
|
||||
valueField.config.displayNameFromDS = extractText(valueField.config?.displayNameFromDS, alias);
|
||||
}
|
||||
frame.name = extractText(frame.name, alias);
|
||||
} catch (error: any) {
|
||||
console.error('Failed to apply RegExp:', error?.message || error);
|
||||
}
|
||||
return frame;
|
||||
}
|
||||
|
||||
for (const field of frame.fields) {
|
||||
if (field.type !== FieldType.time) {
|
||||
try {
|
||||
field.config.displayNameFromDS = extractText(field.config?.displayNameFromDS, alias);
|
||||
} catch (error: any) {
|
||||
console.error('Failed to apply RegExp:', error?.message || error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return frame;
|
||||
}
|
||||
|
||||
function extractText(str: string, pattern: string) {
|
||||
const extractPattern = new RegExp(pattern);
|
||||
const extractedValue = extractPattern.exec(str);
|
||||
return extractedValue[0];
|
||||
}
|
||||
|
||||
function timeShift(interval, range) {
|
||||
const shift = utils.parseTimeShiftInterval(interval) / 1000;
|
||||
return _.map(range, (time) => {
|
||||
return time - shift;
|
||||
});
|
||||
}
|
||||
|
||||
const metricFunctions = {
|
||||
timeShift: timeShift,
|
||||
setAlias: setAlias,
|
||||
setAliasByRegex: setAliasByRegex,
|
||||
replaceAlias: replaceAlias,
|
||||
};
|
||||
|
||||
export default {
|
||||
get metricFunctions() {
|
||||
return metricFunctions;
|
||||
},
|
||||
};
|
||||
963
src/datasource/datasource.ts
Normal file
963
src/datasource/datasource.ts
Normal file
@@ -0,0 +1,963 @@
|
||||
import _ from 'lodash';
|
||||
import config from 'grafana/app/core/config';
|
||||
import { contextSrv } from 'grafana/app/core/core';
|
||||
import * as dateMath from 'grafana/app/core/utils/datemath';
|
||||
import * as utils from './utils';
|
||||
import * as migrations from './migrations';
|
||||
import * as metricFunctions from './metricFunctions';
|
||||
import * as c from './constants';
|
||||
import dataProcessor from './dataProcessor';
|
||||
import responseHandler from './responseHandler';
|
||||
import problemsHandler from './problemsHandler';
|
||||
import { Zabbix } from './zabbix/zabbix';
|
||||
import { ZabbixAPIError } from './zabbix/connectors/zabbix_api/zabbixAPIConnector';
|
||||
import { ProblemDTO, ShowProblemTypes, VariableQueryTypes, ZabbixDSOptions, ZabbixMetricsQuery } from './types';
|
||||
import { BackendSrvRequest, getBackendSrv, getTemplateSrv, toDataQueryResponse } from '@grafana/runtime';
|
||||
import {
|
||||
DataFrame,
|
||||
dataFrameFromJSON,
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
DataSourceApi,
|
||||
DataSourceInstanceSettings,
|
||||
FieldType,
|
||||
isDataFrame,
|
||||
LoadingState,
|
||||
toDataFrame,
|
||||
} from '@grafana/data';
|
||||
import { AnnotationQueryEditor } from './components/AnnotationQueryEditor';
|
||||
|
||||
export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDSOptions> {
|
||||
name: string;
|
||||
basicAuth: any;
|
||||
withCredentials: any;
|
||||
|
||||
trends: boolean;
|
||||
trendsFrom: string;
|
||||
trendsRange: string;
|
||||
cacheTTL: any;
|
||||
disableReadOnlyUsersAck: boolean;
|
||||
disableDataAlignment: boolean;
|
||||
enableDirectDBConnection: boolean;
|
||||
dbConnectionDatasourceId: number;
|
||||
dbConnectionDatasourceName: string;
|
||||
dbConnectionRetentionPolicy: string;
|
||||
enableDebugLog: boolean;
|
||||
datasourceId: number;
|
||||
zabbix: Zabbix;
|
||||
|
||||
replaceTemplateVars: (target: any, scopedVars?: any) => any;
|
||||
|
||||
constructor(instanceSettings: DataSourceInstanceSettings<ZabbixDSOptions>) {
  super(instanceSettings);

  // Verbose performance logging only in dev builds.
  this.enableDebugLog = config.buildInfo.env === 'development';

  // Register the React annotation editor and the migration hook for
  // annotations saved by older plugin versions.
  this.annotations = {
    QueryEditor: AnnotationQueryEditor,
    prepareAnnotation: migrations.prepareAnnotation,
  };

  // Use custom format for template variables
  const templateSrv = getTemplateSrv();
  this.replaceTemplateVars = _.partial(replaceTemplateVars, templateSrv);

  // General data source settings
  this.datasourceId = instanceSettings.id;
  this.name = instanceSettings.name;
  this.basicAuth = instanceSettings.basicAuth;
  this.withCredentials = instanceSettings.withCredentials;

  // Upgrade settings saved by older plugin versions to the current schema.
  const jsonData = migrations.migrateDSConfig(instanceSettings.jsonData);

  // Use trends instead history since specified time
  this.trends = jsonData.trends;
  this.trendsFrom = jsonData.trendsFrom || '7d';
  this.trendsRange = jsonData.trendsRange || '4d';

  // Set cache update interval
  const ttl = jsonData.cacheTTL || '1h';
  this.cacheTTL = utils.parseInterval(ttl);

  // Other options
  this.disableReadOnlyUsersAck = jsonData.disableReadOnlyUsersAck;
  this.disableDataAlignment = jsonData.disableDataAlignment;

  // Direct DB Connection options
  this.enableDirectDBConnection = jsonData.dbConnectionEnable || false;
  this.dbConnectionDatasourceId = jsonData.dbConnectionDatasourceId;
  this.dbConnectionDatasourceName = jsonData.dbConnectionDatasourceName;
  this.dbConnectionRetentionPolicy = jsonData.dbConnectionRetentionPolicy;

  const zabbixOptions = {
    basicAuth: this.basicAuth,
    withCredentials: this.withCredentials,
    cacheTTL: this.cacheTTL,
    enableDirectDBConnection: this.enableDirectDBConnection,
    dbConnectionDatasourceId: this.dbConnectionDatasourceId,
    dbConnectionDatasourceName: this.dbConnectionDatasourceName,
    dbConnectionRetentionPolicy: this.dbConnectionRetentionPolicy,
    datasourceId: this.datasourceId,
  };

  // Facade over the Zabbix API and (optionally) the direct DB connectors.
  this.zabbix = new Zabbix(zabbixOptions);
}
|
||||
|
||||
////////////////////////
|
||||
// Datasource methods //
|
||||
////////////////////////
|
||||
|
||||
/**
|
||||
* Query panel data. Calls for each panel in dashboard.
|
||||
* @param {Object} request Contains time range, targets and other info.
|
||||
* @return {Object} Grafana metrics object with timeseries data for each target.
|
||||
*/
|
||||
query(request: DataQueryRequest<ZabbixMetricsQuery>) {
|
||||
// Migrate old targets
|
||||
const requestTargets = request.targets.map((t) => {
|
||||
// Prevent changes of original object
|
||||
const target = _.cloneDeep(t);
|
||||
return migrations.migrate(target);
|
||||
});
|
||||
|
||||
const backendResponsePromise = this.backendQuery({ ...request, targets: requestTargets });
|
||||
const dbConnectionResponsePromise = this.dbConnectionQuery({ ...request, targets: requestTargets });
|
||||
const frontendResponsePromise = this.frontendQuery({ ...request, targets: requestTargets });
|
||||
const annotationResposePromise = this.annotationRequest({ ...request, targets: requestTargets });
|
||||
|
||||
return Promise.all([
|
||||
backendResponsePromise,
|
||||
dbConnectionResponsePromise,
|
||||
frontendResponsePromise,
|
||||
annotationResposePromise,
|
||||
]).then((rsp) => {
|
||||
// Merge backend and frontend queries results
|
||||
const [backendRes, dbConnectionRes, frontendRes, annotationRes] = rsp;
|
||||
if (dbConnectionRes.data) {
|
||||
backendRes.data = backendRes.data.concat(dbConnectionRes.data);
|
||||
}
|
||||
if (frontendRes.data) {
|
||||
backendRes.data = backendRes.data.concat(frontendRes.data);
|
||||
}
|
||||
|
||||
if (annotationRes.data) {
|
||||
backendRes.data = backendRes.data.concat(annotationRes.data);
|
||||
}
|
||||
|
||||
return {
|
||||
data: backendRes.data,
|
||||
state: LoadingState.Done,
|
||||
key: request.requestId,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Run the backend-capable targets through the Go plugin via a single POST
 * to Grafana's /api/ds/query endpoint. Returns an empty response when no
 * visible backend target exists. HTTP errors are converted to a standard
 * DataQueryResponse rather than thrown.
 */
async backendQuery(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
  const { intervalMs, maxDataPoints, range, requestId } = request;
  const targets = request.targets.filter(this.isBackendTarget);

  // Add range variables
  request.scopedVars = Object.assign({}, request.scopedVars, utils.getRangeScopedVars(request.range));

  const queries = _.compact(
    targets.map((query) => {
      // Don't request for hidden targets
      if (query.hide) {
        return null;
      }

      this.replaceTargetVariables(query, request);

      return {
        ...query,
        datasourceId: this.datasourceId,
        intervalMs,
        maxDataPoints,
      };
    })
  );

  // Return early if no queries exist
  if (!queries.length) {
    return Promise.resolve({ data: [] });
  }

  const body: any = { queries };

  if (range) {
    body.range = range;
    // The endpoint expects epoch-millisecond strings for the time bounds.
    body.from = range.from.valueOf().toString();
    body.to = range.to.valueOf().toString();
  }

  let rsp: any;
  try {
    rsp = await getBackendSrv()
      .fetch({
        url: '/api/ds/query',
        method: 'POST',
        data: body,
        requestId,
      })
      .toPromise();
  } catch (err) {
    // Surface the failure as a response object so the panel shows the error.
    return toDataQueryResponse(err);
  }

  const resp = toDataQueryResponse(rsp);
  this.sortByRefId(resp);
  // Alias functions are applied on the frontend even for backend results.
  this.applyFrontendFunctions(resp, request);
  responseHandler.convertZabbixUnits(resp);
  if (responseHandler.isConvertibleToWide(resp.data)) {
    console.log('Converting response to the wide format');
    resp.data = responseHandler.convertToWide(resp.data);
  }

  return resp;
}
|
||||
|
||||
/**
 * Run the targets that neither the backend plugin nor the direct DB
 * connection handles: text, IT services, triggers and problems queries.
 * Results for all targets are flattened into a single response.
 */
async frontendQuery(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
  const frontendTargets = request.targets.filter((t) => !(this.isBackendTarget(t) || this.isDBConnectionTarget(t)));
  const promises = _.map(frontendTargets, (target) => {
    // Don't request for hidden targets
    if (target.hide) {
      return [];
    }

    // Add range variables
    request.scopedVars = Object.assign({}, request.scopedVars, utils.getRangeScopedVars(request.range));
    this.replaceTargetVariables(target, request);
    const timeRange = this.buildTimeRange(request, target);

    // Dispatch by editor mode (see constants MODE_*).
    if (target.queryType === c.MODE_TEXT) {
      // Text query
      // Don't request undefined targets
      if (!target.group || !target.host || !target.item) {
        return [];
      }
      return this.queryTextData(target, timeRange);
    } else if (target.queryType === c.MODE_ITSERVICE) {
      // IT services query
      return this.queryITServiceData(target, timeRange, request);
    } else if (target.queryType === c.MODE_TRIGGERS) {
      // Triggers query
      return this.queryTriggersData(target, timeRange);
    } else if (target.queryType === c.MODE_PROBLEMS) {
      // Problems query
      return this.queryProblems(target, timeRange, request);
    } else {
      return [];
    }
  });

  // Data for panel (all targets)
  return Promise.all(_.flatten(promises))
    .then(_.flatten)
    .then((data) => {
      // Problems frames have their own shape; only align/widen regular series.
      if (data && data.length > 0 && isDataFrame(data[0]) && !utils.isProblemsDataFrame(data[0])) {
        data = responseHandler.alignFrames(data);
        if (responseHandler.isConvertibleToWide(data)) {
          console.log('Converting response to the wide format');
          data = responseHandler.convertToWide(data);
        }
      }
      return { data };
    });
}
|
||||
|
||||
/**
 * Run metric and item-id targets through the direct DB connection
 * (external history storage) instead of the Zabbix API.
 */
async dbConnectionQuery(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
  const targets = request.targets.filter(this.isDBConnectionTarget);

  const queries = _.compact(
    targets.map((target) => {
      // Don't request for hidden targets
      if (target.hide) {
        return [];
      }

      // Add range variables
      request.scopedVars = Object.assign({}, request.scopedVars, utils.getRangeScopedVars(request.range));
      this.replaceTargetVariables(target, request);
      const timeRange = this.buildTimeRange(request, target);
      // Query trend tables instead of raw history for far-past ranges.
      const useTrends = this.isUseTrends(timeRange);

      if (!target.queryType || target.queryType === c.MODE_METRICS) {
        return this.queryNumericData(target, timeRange, useTrends, request);
      } else if (target.queryType === c.MODE_ITEMID) {
        // Item ID query
        if (!target.itemids) {
          return [];
        }
        return this.queryItemIdData(target, timeRange, useTrends, request);
      } else {
        return [];
      }
    })
  );

  const promises: Promise<DataQueryResponse> = Promise.all(queries)
    .then(_.flatten)
    .then((data) => ({ data }));

  return promises;
}
|
||||
|
||||
/**
 * Convert the request range to a [from, to] pair in epoch seconds and
 * apply the target's Time-category functions (timeShift() etc.).
 * NOTE(review): dateMath.parse() returns a moment-like object; the division
 * relies on numeric coercion (valueOf → milliseconds) — confirm intended.
 */
buildTimeRange(request, target) {
  let timeFrom = Math.ceil(dateMath.parse(request.range.from) / 1000);
  let timeTo = Math.ceil(dateMath.parse(request.range.to) / 1000);

  // Apply Time-related functions (timeShift(), etc)
  const timeFunctions = bindFunctionDefs(target.functions, 'Time');
  if (timeFunctions.length) {
    const [time_from, time_to] = utils.sequence(timeFunctions)([timeFrom, timeTo]);
    timeFrom = time_from;
    timeTo = time_to;
  }
  return [timeFrom, timeTo];
}
|
||||
|
||||
/**
|
||||
* Query target data for Metrics
|
||||
*/
|
||||
async queryNumericData(target, timeRange, useTrends, request): Promise<any> {
|
||||
const getItemOptions = {
|
||||
itemtype: 'num',
|
||||
};
|
||||
|
||||
const items = await this.zabbix.getItemsFromTarget(target, getItemOptions);
|
||||
|
||||
const queryStart = new Date().getTime();
|
||||
const result = await this.queryNumericDataForItems(items, target, timeRange, useTrends, request);
|
||||
const queryEnd = new Date().getTime();
|
||||
|
||||
if (this.enableDebugLog) {
|
||||
console.log(`Datasource::Performance Query Time (${this.name}): ${queryEnd - queryStart}`);
|
||||
}
|
||||
|
||||
return this.handleBackendPostProcessingResponse(result, request, target);
|
||||
}
|
||||
|
||||
/**
 * Query history for numeric items: fetch raw history (or trends, when
 * `useTrends` is set) for the given items and hand the series to the
 * backend data-processing endpoint.
 */
async queryNumericDataForItems(items, target: ZabbixMetricsQuery, timeRange, useTrends, request) {
  let history;
  request.valueType = this.getTrendValueType(target);
  // consolidateBy() on the target overrides the trend value type.
  request.consolidateBy = getConsolidateBy(target) || request.valueType;

  if (useTrends) {
    history = await this.zabbix.getTrends(items, timeRange, request);
  } else {
    history = await this.zabbix.getHistoryTS(items, timeRange, request);
  }

  const range = {
    from: timeRange[0],
    to: timeRange[1],
  };
  return await this.invokeDataProcessingQuery(history, target, range);
}
|
||||
|
||||
/**
 * Send raw time series to the plugin's resource endpoint, which applies
 * the query's processing functions and returns JSON-encoded data frames.
 */
async invokeDataProcessingQuery(timeSeriesData, query, timeRange) {
  // Request backend for data processing
  const requestOptions: BackendSrvRequest = {
    url: `/api/datasources/${this.datasourceId}/resources/db-connection-post`,
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    hideFromInspector: false,
    data: {
      series: timeSeriesData,
      query,
      timeRange,
    },
  };

  const response: any = await getBackendSrv().fetch<any>(requestOptions).toPromise();
  return response.data;
}
|
||||
|
||||
/**
 * Rehydrate JSON-encoded frames returned by the backend post-processing
 * endpoint, tag them with the target's refId, apply frontend functions and
 * optionally convert to the wide format.
 */
handleBackendPostProcessingResponse(response, request, target) {
  const frames = response.map((frameJSON) => {
    const frame = dataFrameFromJSON(frameJSON);
    frame.refId = target.refId;
    return frame;
  });

  const result = { data: frames };
  this.sortByRefId(result);
  this.applyFrontendFunctions(result, request);
  if (responseHandler.isConvertibleToWide(result.data)) {
    console.log('Converting response to the wide format');
    result.data = responseHandler.convertToWide(result.data);
  }

  return result.data;
}
|
||||
|
||||
/**
 * Determine which trend value to read (e.g. avg/min/max) from a
 * trendValue()-style function on the target. Defaults to 'avg'.
 */
getTrendValueType(target) {
  // Find trendValue() function and get specified trend value
  const trendFunctions = _.map(metricFunctions.getCategories()['Trends'], 'name');
  const trendValueFunc = _.find(target.functions, (func) => {
    return _.includes(trendFunctions, func.def.name);
  });
  return trendValueFunc ? trendValueFunc.params[0] : 'avg';
}
|
||||
|
||||
/**
 * Sort response frames lexicographically by refId (in place) so the result
 * order is stable regardless of which sub-query resolved first.
 */
sortByRefId(response: DataQueryResponse) {
  response.data.sort((left, right) => {
    if (left.refId < right.refId) {
      return -1;
    }
    return left.refId > right.refId ? 1 : 0;
  });
}
|
||||
|
||||
/**
 * Apply each target's Alias-category functions to its frames (matched by
 * refId). Mutates the frames and returns the same response object.
 */
applyFrontendFunctions(response: DataQueryResponse, request: DataQueryRequest<any>) {
  for (const frame of response.data) {
    const target = getRequestTarget(request, frame.refId);

    // Apply alias functions
    const aliasFunctions = bindFunctionDefs(target.functions, 'Alias');
    utils.sequence(aliasFunctions)(frame);
  }
  return response;
}
|
||||
|
||||
/**
|
||||
* Query target data for Text
|
||||
*/
|
||||
queryTextData(target, timeRange) {
|
||||
const options = {
|
||||
itemtype: 'text',
|
||||
};
|
||||
return this.zabbix
|
||||
.getItemsFromTarget(target, options)
|
||||
.then((items) => {
|
||||
return this.zabbix.getHistoryText(items, timeRange, target);
|
||||
})
|
||||
.then((result) => {
|
||||
if (target.resultFormat !== 'table') {
|
||||
return result.map((s) => responseHandler.seriesToDataFrame(s, target, [], FieldType.string));
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Query target data for Item ID mode: expand template variables in the
 * comma-separated id list, resolve the items and fetch their numeric data.
 * Returns [] when the list is empty after expansion.
 */
queryItemIdData(target, timeRange, useTrends, options) {
  const templateSrv = getTemplateSrv();
  const replaced = templateSrv.replace(target.itemids, options.scopedVars, zabbixItemIdsTemplateFormat);

  // Split the id list and drop blanks. The previous `if (!itemids)` check ran
  // AFTER split(), where itemids was always a non-empty array, so it could
  // never fire; test for an actually-empty id list instead.
  const itemids = _.map((replaced || '').split(','), (itemid) => itemid.trim()).filter((itemid) => itemid.length > 0);

  if (!itemids.length) {
    return [];
  }

  return this.zabbix.getItemsByIDs(itemids).then((items) => {
    return this.queryNumericDataForItems(items, target, timeRange, useTrends, options);
  });
}
|
||||
|
||||
/**
 * Query target data for IT Services (SLA). Supports both the legacy
 * single-service option (target.itservice) and the newer filter-based
 * selection (target.itServiceFilter).
 */
async queryITServiceData(target, timeRange, request) {
  // Don't show undefined and hidden targets
  if (target.hide || (!target.itservice && !target.itServiceFilter) || !target.slaProperty) {
    return [];
  }

  let itServiceFilter;
  // Old-style targets reference a single service instead of a filter.
  request.isOldVersion = target.itservice && !target.itServiceFilter;

  if (request.isOldVersion) {
    // Backward compatibility
    itServiceFilter = '/.*/';
  } else {
    itServiceFilter = this.replaceTemplateVars(target.itServiceFilter, request.scopedVars);
  }

  request.slaInterval = target.slaInterval;

  let itservices = await this.zabbix.getITServices(itServiceFilter);
  if (request.isOldVersion) {
    // Narrow the match-all result down to the single legacy service id.
    itservices = _.filter(itservices, { serviceid: target.itservice?.serviceid });
  }
  const itservicesdp = await this.zabbix.getSLA(itservices, timeRange, target, request);
  const backendRequest = responseHandler.itServiceResponseToTimeSeries(itservicesdp, target.slaInterval);
  const processedResponse = await this.invokeDataProcessingQuery(backendRequest, target, {});
  return this.handleBackendPostProcessingResponse(processedResponse, request, target);
}
|
||||
|
||||
/**
 * Query trigger alerts for hosts matching the target's group/host/app
 * filters within the given [timeFrom, timeTo] range (epoch seconds).
 */
queryTriggersData(target, timeRange) {
  const [timeFrom, timeTo] = timeRange;
  return this.zabbix.getHostsFromTarget(target).then((results) => {
    const [hosts, apps] = results;
    if (hosts.length) {
      const hostids = _.map(hosts, 'hostid');
      const appids = _.map(apps, 'applicationid');
      const options = {
        minSeverity: target.triggers.minSeverity,
        acknowledged: target.triggers.acknowledged,
        count: target.triggers.count,
        timeFrom: timeFrom,
        timeTo: timeTo,
      };
      const groupFilter = target.group.filter;
      // Groups are fetched alongside alerts so the handler can resolve names.
      return Promise.all([
        this.zabbix.getHostAlerts(hostids, appids, options),
        this.zabbix.getGroups(groupFilter),
      ]).then(([triggers, groups]) => {
        return responseHandler.handleTriggersResponse(triggers, groups, timeRange);
      });
    } else {
      return Promise.resolve([]);
    }
  });
}
|
||||
|
||||
/**
 * Query Zabbix problems for the target, apply filtering/sorting/enrichment
 * via problemsHandler, and return a problems data frame for the panel.
 */
queryProblems(target: ZabbixMetricsQuery, timeRange, options) {
  const [timeFrom, timeTo] = timeRange;
  const userIsEditor = contextSrv.isEditor || contextSrv.isGrafanaAdmin;

  let proxies;
  let showAckButton = true;

  const showProblems = target.showProblems || ShowProblemTypes.Problems;
  const showProxy = target.options.hostProxy;

  // Proxies are only needed when the panel displays the host proxy.
  // NOTE(review): the fallback is a function `() => []`, not a promise —
  // Promise.all resolves it as-is and _.keyBy(fn) yields {}, which happens
  // to be harmless; Promise.resolve([]) was probably intended. Confirm.
  const getProxiesPromise = showProxy ? this.zabbix.getProxies() : () => [];
  showAckButton = !this.disableReadOnlyUsersAck || userIsEditor;

  // Replace template variables
  const groupFilter = this.replaceTemplateVars(target.group?.filter, options.scopedVars);
  const hostFilter = this.replaceTemplateVars(target.host?.filter, options.scopedVars);
  const appFilter = this.replaceTemplateVars(target.application?.filter, options.scopedVars);
  const proxyFilter = this.replaceTemplateVars(target.proxy?.filter, options.scopedVars);

  const triggerFilter = this.replaceTemplateVars(target.trigger?.filter, options.scopedVars);
  const tagsFilter = this.replaceTemplateVars(target.tags?.filter, options.scopedVars);

  const replacedTarget = {
    ...target,
    trigger: { filter: triggerFilter },
    tags: { filter: tagsFilter },
  };

  // replaceTemplateVars() builds regex-like string, so we should trim it.
  const tagsFilterStr = tagsFilter.replace('/^', '').replace('$/', '');
  const tags = utils.parseTags(tagsFilterStr);
  tags.forEach((tag) => {
    // Zabbix uses {"tag": "<tag>", "value": "<value>", "operator": "<operator>"} format, where 1 means Equal
    tag.operator = 1;
  });

  const problemsOptions: any = {
    recent: showProblems === ShowProblemTypes.Recent,
    minSeverity: target.options?.minSeverity,
    limit: target.options?.limit,
  };

  if (tags && tags.length) {
    problemsOptions.tags = tags;
  }

  // 0/1 are explicit user choices; anything else means "don't filter by ack".
  if (target.options?.acknowledged === 0 || target.options?.acknowledged === 1) {
    problemsOptions.acknowledged = !!target.options?.acknowledged;
  }

  if (target.options?.minSeverity) {
    // Start from the minimum severity, then intersect with the explicit list.
    let severities = [0, 1, 2, 3, 4, 5].filter((v) => v >= target.options?.minSeverity);
    if (target.options?.severities) {
      severities = severities.filter((v) => target.options?.severities.includes(v));
    }
    problemsOptions.severities = severities;
  }

  let getProblemsPromise: Promise<ProblemDTO[]>;
  if (showProblems === ShowProblemTypes.History || target.options?.useTimeRange) {
    // History mode (or explicit opt-in) is bounded by the dashboard range.
    problemsOptions.timeFrom = timeFrom;
    problemsOptions.timeTo = timeTo;
    getProblemsPromise = this.zabbix.getProblemsHistory(
      groupFilter,
      hostFilter,
      appFilter,
      proxyFilter,
      problemsOptions
    );
  } else {
    getProblemsPromise = this.zabbix.getProblems(groupFilter, hostFilter, appFilter, proxyFilter, problemsOptions);
  }

  // Enrichment pipeline: maintenance status, ack button visibility,
  // pre-filtering, sorting, datasource and proxy attribution.
  const problemsPromises = Promise.all([getProblemsPromise, getProxiesPromise])
    .then(([problems, sourceProxies]) => {
      proxies = _.keyBy(sourceProxies, 'proxyid');
      return problems;
    })
    .then((problems) => problemsHandler.setMaintenanceStatus(problems))
    .then((problems) => problemsHandler.setAckButtonStatus(problems, showAckButton))
    .then((problems) => problemsHandler.filterTriggersPre(problems, replacedTarget))
    .then((problems) => problemsHandler.sortProblems(problems, target))
    .then((problems) => problemsHandler.addTriggerDataSource(problems, target))
    .then((problems) => problemsHandler.addTriggerHostProxy(problems, proxies));

  return problemsPromises.then((problems) => {
    const problemsDataFrame = problemsHandler.toDataFrame(problems);
    return problemsDataFrame;
  });
}
|
||||
|
||||
/**
|
||||
* Test connection to Zabbix API and external history DB.
|
||||
*/
|
||||
async testDatasource() {
|
||||
try {
|
||||
const { zabbixVersion, dbConnectorStatus } = await this.zabbix.testDataSource();
|
||||
let message = `Zabbix API version: ${zabbixVersion}`;
|
||||
if (dbConnectorStatus) {
|
||||
message += `, DB connector type: ${dbConnectorStatus.dsType}`;
|
||||
}
|
||||
return {
|
||||
status: 'success',
|
||||
title: 'Success',
|
||||
message: message,
|
||||
};
|
||||
} catch (error: any) {
|
||||
if (error instanceof ZabbixAPIError) {
|
||||
return {
|
||||
status: 'error',
|
||||
title: error.message,
|
||||
message: error.message,
|
||||
};
|
||||
} else if (error.data && error.data.message) {
|
||||
return {
|
||||
status: 'error',
|
||||
title: 'Zabbix Client Error',
|
||||
message: error.data.message,
|
||||
};
|
||||
} else if (typeof error === 'string') {
|
||||
return {
|
||||
status: 'error',
|
||||
title: 'Unknown Error',
|
||||
message: error,
|
||||
};
|
||||
} else {
|
||||
console.log(error);
|
||||
return {
|
||||
status: 'error',
|
||||
title: 'Connection failed',
|
||||
message: 'Could not connect to given url',
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
////////////////
|
||||
// Templating //
|
||||
////////////////
|
||||
|
||||
/**
 * Find metrics from templated request.
 *
 * @param {string} query Query from Templating (legacy string or query model)
 * @param options request options; only `range` is used (for ItemValues)
 * @return {string} Metric name - group, host, app or item or list
 * of metrics in "{metric1, metric2,..., metricN}" format.
 */
metricFindQuery(query, options) {
  let resultPromise;
  let queryModel = _.cloneDeep(query);

  if (!query) {
    return Promise.resolve([]);
  }

  if (typeof query === 'string') {
    // Backward compatibility
    queryModel = utils.parseLegacyVariableQuery(query);
  }

  // Expand template variables in every filter field of the query model.
  for (const prop of ['group', 'host', 'application', 'itemTag', 'item']) {
    queryModel[prop] = this.replaceTemplateVars(queryModel[prop], {});
  }

  const { group, host, application, item } = queryModel;

  switch (queryModel.queryType) {
    case VariableQueryTypes.Group:
      resultPromise = this.zabbix.getGroups(queryModel.group);
      break;
    case VariableQueryTypes.Host:
      resultPromise = this.zabbix.getHosts(queryModel.group, queryModel.host);
      break;
    case VariableQueryTypes.Application:
      resultPromise = this.zabbix.getApps(queryModel.group, queryModel.host, queryModel.application);
      break;
    case VariableQueryTypes.ItemTag:
      resultPromise = this.zabbix.getItemTags(queryModel.group, queryModel.host, queryModel.itemTag);
      break;
    case VariableQueryTypes.Item:
      resultPromise = this.zabbix.getItems(
        queryModel.group,
        queryModel.host,
        queryModel.application,
        null,
        queryModel.item
      );
      break;
    case VariableQueryTypes.ItemValues:
      // ItemValues needs the dashboard time range to bound the value lookup.
      const range = options?.range;
      resultPromise = this.zabbix.getItemValues(group, host, application, item, { range });
      break;
    default:
      resultPromise = Promise.resolve([]);
      break;
  }

  return resultPromise.then((metrics) => {
    return _.map(metrics, formatMetric);
  });
}
|
||||
|
||||
/**
 * Report whether any filter on the target references a template variable,
 * so Grafana can re-run the query when variables change.
 */
targetContainsTemplate(target: ZabbixMetricsQuery): boolean {
  const templateSrv = getTemplateSrv() as any;
  const candidates = [
    target.group?.filter,
    target.host?.filter,
    target.application?.filter,
    target.itemTag?.filter,
    target.item?.filter,
    target.proxy?.filter,
    target.trigger?.filter,
    target.textFilter,
    target.itServiceFilter,
  ];
  return candidates.some((filter) => templateSrv.variableExists(filter));
}
|
||||
|
||||
/////////////////
|
||||
// Annotations //
|
||||
/////////////////
|
||||
|
||||
async annotationRequest(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
|
||||
const targets = request.targets.filter((t) => t.fromAnnotations);
|
||||
if (!targets.length) {
|
||||
return Promise.resolve({ data: [] });
|
||||
}
|
||||
|
||||
const events = await this.annotationQueryLegacy({ ...request, targets });
|
||||
return { data: [toDataFrame(events)] };
|
||||
}
|
||||
|
||||
/**
 * Legacy annotation query: fetch problem history for the (single)
 * annotation target and map each problem to a Grafana annotation event.
 */
annotationQueryLegacy(options) {
  const timeRange = options.range || options.rangeRaw;
  const timeFrom = Math.ceil(dateMath.parse(timeRange.from) / 1000);
  const timeTo = Math.ceil(dateMath.parse(timeRange.to) / 1000);
  const annotation = options.targets[0];

  // Show all triggers
  const problemsOptions: any = {
    // '1' = problem events only; ['0', '1'] also includes OK events.
    value: annotation.options.showOkEvents ? ['0', '1'] : '1',
    valueFromEvent: true,
    timeFrom,
    timeTo,
  };

  if (annotation.options.minSeverity) {
    const severities = [0, 1, 2, 3, 4, 5].filter((v) => v >= Number(annotation.options.minSeverity));
    problemsOptions.severities = severities;
  }

  const groupFilter = this.replaceTemplateVars(annotation.group.filter, {});
  const hostFilter = this.replaceTemplateVars(annotation.host.filter, {});
  const appFilter = this.replaceTemplateVars(annotation.application.filter, {});
  const proxyFilter = undefined;

  return this.zabbix
    .getProblemsHistory(groupFilter, hostFilter, appFilter, proxyFilter, problemsOptions)
    .then((problems) => {
      // Filter triggers by description
      const problemName = this.replaceTemplateVars(annotation.trigger.filter, {});
      if (utils.isRegex(problemName)) {
        problems = _.filter(problems, (p) => {
          return utils.buildRegex(problemName).test(p.description);
        });
      } else if (problemName) {
        problems = _.filter(problems, (p) => {
          return p.description === problemName;
        });
      }

      // Hide acknowledged events if option enabled
      if (annotation.hideAcknowledged) {
        problems = _.filter(problems, (p) => {
          return !p.acknowledges?.length;
        });
      }

      return _.map(problems, (p) => {
        const formattedAcknowledges = utils.formatAcknowledges(p.acknowledges);

        let annotationTags: string[] = [];
        if (annotation.showHostname) {
          annotationTags = _.map(p.hosts, 'name');
        }

        return {
          title: p.value === '1' ? 'Problem' : 'OK',
          // Zabbix timestamps are epoch seconds; Grafana expects milliseconds.
          time: p.timestamp * 1000,
          annotation: annotation,
          text: p.name + formattedAcknowledges,
          tags: annotationTags,
        };
      });
    });
}
|
||||
|
||||
// Replace template variables

/**
 * Resolves dashboard template variables inside the target's filter fields,
 * item ids and metric-function parameters. Mutates `target` in place.
 */
replaceTargetVariables(target, options) {
  const templateSrv = getTemplateSrv();
  // Filter-bearing sub-objects that may contain template variables.
  const parts = ['group', 'host', 'application', 'itemTag', 'item'];
  _.forEach(parts, (p) => {
    if (target[p] && target[p].filter) {
      target[p].filter = this.replaceTemplateVars(target[p].filter, options.scopedVars);
    }
  });

  if (target.textFilter) {
    target.textFilter = this.replaceTemplateVars(target.textFilter, options.scopedVars);
  }

  if (target.itemids) {
    // Item ids use a dedicated formatter (comma-separated, no regex wrapping).
    target.itemids = templateSrv.replace(target.itemids, options.scopedVars, zabbixItemIdsTemplateFormat);
  }

  _.forEach(target.functions, (func) => {
    func.params = _.map(func.params, (param) => {
      if (typeof param === 'number') {
        // Numeric params are replaced via their string form, then coerced back.
        return +templateSrv.replace(param.toString(), options.scopedVars);
      } else {
        return templateSrv.replace(param, options.scopedVars);
      }
    });
  });
}
|
||||
|
||||
/**
 * Decides whether trends (pre-aggregated data) should be queried instead of
 * raw history for the given time range.
 * @param timeRange tuple of [timeFrom, timeTo] in Unix seconds
 * @returns true when trends are enabled and the range starts before the
 *          trends threshold or is wider than the configured trends range.
 */
isUseTrends(timeRange) {
  const [timeFrom, timeTo] = timeRange;
  // Point in time (seconds) before which data must come from trends.
  const useTrendsFrom = Math.ceil(dateMath.parse('now-' + this.trendsFrom) / 1000);
  // Maximum range width (seconds) still served from raw history.
  const useTrendsRange = Math.ceil(utils.parseInterval(this.trendsRange) / 1000);
  const useTrends = this.trends && (timeFrom < useTrendsFrom || timeTo - timeFrom > useTrendsRange);
  return useTrends;
}
|
||||
|
||||
isBackendTarget = (target: any): boolean => {
|
||||
if (this.enableDirectDBConnection) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return target.queryType === c.MODE_METRICS || target.queryType === c.MODE_ITEMID;
|
||||
};
|
||||
|
||||
/**
 * True when the target should be executed over a direct DB connection
 * (metrics / item-id query modes only).
 */
isDBConnectionTarget = (target: any): boolean => {
  return this.enableDirectDBConnection && (target.queryType === c.MODE_METRICS || target.queryType === c.MODE_ITEMID);
};
|
||||
}
|
||||
|
||||
/**
 * Selects the target's metric functions that belong to the given category and
 * binds their parameters, producing ready-to-apply data processing functions.
 * @param functionDefs function instances attached to the query target
 * @param category     metric function category name (e.g. 'Aggregate')
 */
function bindFunctionDefs(functionDefs, category) {
  const aggregationFunctions = _.map(metricFunctions.getCategories()[category], 'name');
  const aggFuncDefs = _.filter(functionDefs, (func) => {
    return _.includes(aggregationFunctions, func.def.name);
  });

  return _.map(aggFuncDefs, (func) => {
    const funcInstance = metricFunctions.createFuncInstance(func.def, func.params);
    return funcInstance.bindFunction(dataProcessor.metricFunctions);
  });
}
|
||||
|
||||
function getConsolidateBy(target) {
|
||||
let consolidateBy;
|
||||
const funcDef = _.find(target.functions, (func) => {
|
||||
return func.def.name === 'consolidateBy';
|
||||
});
|
||||
if (funcDef && funcDef.params && funcDef.params.length) {
|
||||
consolidateBy = funcDef.params[0];
|
||||
}
|
||||
return consolidateBy;
|
||||
}
|
||||
|
||||
function formatMetric(metricObj) {
|
||||
return {
|
||||
text: metricObj.name,
|
||||
expandable: false,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom formatter for template variables.
|
||||
* Default Grafana "regex" formatter returns
|
||||
* value1|value2
|
||||
* This formatter returns
|
||||
* (value1|value2)
|
||||
* This format needed for using in complex regex with
|
||||
* template variables, for example
|
||||
* /CPU $cpu_item.*time/ where $cpu_item is system,user,iowait
|
||||
*/
|
||||
export function zabbixTemplateFormat(value) {
|
||||
if (typeof value === 'string') {
|
||||
return utils.escapeRegex(value);
|
||||
}
|
||||
|
||||
const escapedValues = _.map(value, utils.escapeRegex);
|
||||
return '(' + escapedValues.join('|') + ')';
|
||||
}
|
||||
|
||||
function zabbixItemIdsTemplateFormat(value) {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
return value.join(',');
|
||||
}
|
||||
|
||||
/**
 * If template variables are used in request, replace it using regex format
 * and wrap with '/' for proper multi-value work. Example:
 * $variable selected as a, b, c
 * We use filter $variable
 * $variable -> a|b|c -> /a|b|c/
 * /$variable/ -> /a|b|c/ -> /a|b|c/
 */
export function replaceTemplateVars(templateSrv, target, scopedVars) {
  let replacedTarget = templateSrv.replace(target, scopedVars, zabbixTemplateFormat);
  // Wrap in /^...$/ only when a substitution actually happened and the result
  // is not already a regex — a plain substituted value must match exactly.
  if (target && target !== replacedTarget && !utils.isRegex(replacedTarget)) {
    replacedTarget = '/^' + replacedTarget + '$/';
  }
  return replacedTarget;
}
|
||||
|
||||
export function base64StringToArrowTable(text: string) {
|
||||
const b64 = atob(text);
|
||||
const arr = Uint8Array.from(b64, (c) => {
|
||||
return c.charCodeAt(0);
|
||||
});
|
||||
return arr;
|
||||
}
|
||||
|
||||
function getRequestTarget(request: DataQueryRequest<any>, refId: string): any {
|
||||
for (let i = 0; i < request.targets.length; i++) {
|
||||
const target = request.targets[i];
|
||||
if (target.refId === refId) {
|
||||
return target;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
1
src/datasource/img/icn-zabbix-datasource.svg
Normal file
1
src/datasource/img/icn-zabbix-datasource.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 7.4 KiB |
343
src/datasource/metricFunctions.ts
Normal file
343
src/datasource/metricFunctions.ts
Normal file
@@ -0,0 +1,343 @@
|
||||
import _ from 'lodash';
|
||||
import { FuncDef } from './types';
|
||||
import { isNumeric } from './utils';
|
||||
|
||||
const index = {};
|
||||
const categories: { [key: string]: FuncDef[] } = {
|
||||
Transform: [],
|
||||
Aggregate: [],
|
||||
Filter: [],
|
||||
Trends: [],
|
||||
Time: [],
|
||||
Alias: [],
|
||||
Special: [],
|
||||
};
|
||||
|
||||
function addFuncDef(funcDef: FuncDef) {
|
||||
funcDef.params = funcDef.params || [];
|
||||
funcDef.defaultParams = funcDef.defaultParams || [];
|
||||
|
||||
if (funcDef.category) {
|
||||
categories[funcDef.category].push(funcDef);
|
||||
}
|
||||
index[funcDef.name] = funcDef;
|
||||
index[funcDef.shortName || funcDef.name] = funcDef;
|
||||
}
|
||||
|
||||
// Transform
|
||||
|
||||
addFuncDef({
|
||||
name: 'groupBy',
|
||||
category: 'Transform',
|
||||
params: [
|
||||
{ name: 'interval', type: 'string' },
|
||||
{ name: 'function', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] },
|
||||
],
|
||||
defaultParams: ['1m', 'avg'],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'scale',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'factor', type: 'float', options: [100, 0.01, 10, -1] }],
|
||||
defaultParams: [100],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'offset',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'delta', type: 'float', options: [-100, 100] }],
|
||||
defaultParams: [100],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'delta',
|
||||
category: 'Transform',
|
||||
params: [],
|
||||
defaultParams: [],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'rate',
|
||||
category: 'Transform',
|
||||
params: [],
|
||||
defaultParams: [],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'movingAverage',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'factor', type: 'int', options: [6, 10, 60, 100, 600] }],
|
||||
defaultParams: [10],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'exponentialMovingAverage',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'smoothing', type: 'float', options: [6, 10, 60, 100, 600] }],
|
||||
defaultParams: [0.2],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'percentile',
|
||||
category: 'Transform',
|
||||
params: [
|
||||
{ name: 'interval', type: 'string' },
|
||||
{ name: 'percent', type: 'float', options: [25, 50, 75, 90, 95, 99, 99.9] },
|
||||
],
|
||||
defaultParams: ['1m', 95],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'removeAboveValue',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'number', type: 'float' }],
|
||||
defaultParams: [0],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'removeBelowValue',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'number', type: 'float' }],
|
||||
defaultParams: [0],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'transformNull',
|
||||
category: 'Transform',
|
||||
params: [{ name: 'number', type: 'float' }],
|
||||
defaultParams: [0],
|
||||
});
|
||||
|
||||
// Aggregate
|
||||
|
||||
addFuncDef({
|
||||
name: 'aggregateBy',
|
||||
category: 'Aggregate',
|
||||
params: [
|
||||
{ name: 'interval', type: 'string' },
|
||||
{ name: 'function', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] },
|
||||
],
|
||||
defaultParams: ['1m', 'avg'],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'sumSeries',
|
||||
category: 'Aggregate',
|
||||
params: [],
|
||||
defaultParams: [],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'percentileAgg',
|
||||
category: 'Aggregate',
|
||||
params: [
|
||||
{ name: 'interval', type: 'string' },
|
||||
{ name: 'percent', type: 'float', options: [25, 50, 75, 90, 95, 99, 99.9] },
|
||||
],
|
||||
defaultParams: ['1m', 95],
|
||||
});
|
||||
|
||||
// Filter
|
||||
|
||||
addFuncDef({
|
||||
name: 'top',
|
||||
category: 'Filter',
|
||||
params: [
|
||||
{ name: 'number', type: 'int' },
|
||||
{ name: 'value', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] },
|
||||
],
|
||||
defaultParams: [5, 'avg'],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'bottom',
|
||||
category: 'Filter',
|
||||
params: [
|
||||
{ name: 'number', type: 'int' },
|
||||
{ name: 'value', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] },
|
||||
],
|
||||
defaultParams: [5, 'avg'],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'sortSeries',
|
||||
category: 'Filter',
|
||||
params: [{ name: 'direction', type: 'string', options: ['asc', 'desc'] }],
|
||||
defaultParams: ['asc'],
|
||||
});
|
||||
|
||||
// Trends
|
||||
|
||||
addFuncDef({
|
||||
name: 'trendValue',
|
||||
category: 'Trends',
|
||||
params: [{ name: 'type', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count'] }],
|
||||
defaultParams: ['avg'],
|
||||
});
|
||||
|
||||
// Time
|
||||
|
||||
addFuncDef({
|
||||
name: 'timeShift',
|
||||
category: 'Time',
|
||||
params: [{ name: 'interval', type: 'string', options: ['24h', '7d', '1M', '+24h', '-24h'] }],
|
||||
defaultParams: ['24h'],
|
||||
});
|
||||
|
||||
//Alias
|
||||
|
||||
addFuncDef({
|
||||
name: 'setAlias',
|
||||
category: 'Alias',
|
||||
params: [{ name: 'alias', type: 'string' }],
|
||||
defaultParams: [],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'setAliasByRegex',
|
||||
category: 'Alias',
|
||||
params: [{ name: 'aliasByRegex', type: 'string' }],
|
||||
defaultParams: [],
|
||||
});
|
||||
|
||||
addFuncDef({
|
||||
name: 'replaceAlias',
|
||||
category: 'Alias',
|
||||
params: [
|
||||
{ name: 'regexp', type: 'string' },
|
||||
{ name: 'newAlias', type: 'string' },
|
||||
],
|
||||
defaultParams: ['/(.*)/', '$1'],
|
||||
});
|
||||
|
||||
// Special
|
||||
addFuncDef({
|
||||
name: 'consolidateBy',
|
||||
category: 'Special',
|
||||
params: [{ name: 'type', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count'] }],
|
||||
defaultParams: ['avg'],
|
||||
});
|
||||
|
||||
_.each(categories, (funcList, catName) => {
|
||||
categories[catName] = _.sortBy(funcList, 'name');
|
||||
});
|
||||
|
||||
class FuncInstance {
|
||||
def: any;
|
||||
params: any;
|
||||
text: string;
|
||||
added: boolean;
|
||||
|
||||
constructor(funcDef, params) {
|
||||
this.def = funcDef;
|
||||
|
||||
if (params) {
|
||||
this.params = params;
|
||||
} else {
|
||||
// Create with default params
|
||||
this.params = [];
|
||||
this.params = funcDef.defaultParams.slice(0);
|
||||
}
|
||||
|
||||
this.updateText();
|
||||
}
|
||||
|
||||
bindFunction(metricFunctions) {
|
||||
const func = metricFunctions[this.def.name];
|
||||
if (func) {
|
||||
// Bind function arguments
|
||||
let bindedFunc = func;
|
||||
let param;
|
||||
for (let i = 0; i < this.params.length; i++) {
|
||||
param = this.params[i];
|
||||
|
||||
// Convert numeric params
|
||||
if (this.def.params[i].type === 'int' || this.def.params[i].type === 'float') {
|
||||
param = Number(param);
|
||||
}
|
||||
bindedFunc = _.partial(bindedFunc, param);
|
||||
}
|
||||
return bindedFunc;
|
||||
} else {
|
||||
throw { message: 'Method not found ' + this.def.name };
|
||||
}
|
||||
}
|
||||
|
||||
render(metricExp) {
|
||||
const str = this.def.name + '(';
|
||||
const parameters = _.map(this.params, (value, index) => {
|
||||
const paramType = this.def.params[index].type;
|
||||
if (paramType === 'int' || paramType === 'float' || paramType === 'value_or_series' || paramType === 'boolean') {
|
||||
return value;
|
||||
} else if (paramType === 'int_or_interval' && isNumeric(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return "'" + value + "'";
|
||||
});
|
||||
|
||||
if (metricExp) {
|
||||
parameters.unshift(metricExp);
|
||||
}
|
||||
|
||||
return str + parameters.join(', ') + ')';
|
||||
}
|
||||
|
||||
_hasMultipleParamsInString(strValue, index) {
|
||||
if (strValue.indexOf(',') === -1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.def.params[index + 1] && this.def.params[index + 1].optional;
|
||||
}
|
||||
|
||||
updateParam(strValue, index) {
|
||||
// handle optional parameters
|
||||
// if string contains ',' and next param is optional, split and update both
|
||||
if (this._hasMultipleParamsInString(strValue, index)) {
|
||||
_.each(strValue.split(','), (partVal, idx) => {
|
||||
this.updateParam(partVal.trim(), idx);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (strValue === '' && this.def.params[index].optional) {
|
||||
this.params.splice(index, 1);
|
||||
} else {
|
||||
this.params[index] = strValue;
|
||||
}
|
||||
|
||||
this.updateText();
|
||||
}
|
||||
|
||||
updateText() {
|
||||
if (this.params.length === 0) {
|
||||
this.text = this.def.name + '()';
|
||||
return;
|
||||
}
|
||||
|
||||
let text = this.def.name + '(';
|
||||
text += this.params.join(', ');
|
||||
text += ')';
|
||||
this.text = text;
|
||||
}
|
||||
}
|
||||
|
||||
export function createFuncInstance(funcDef, params?) {
|
||||
if (_.isString(funcDef)) {
|
||||
if (!index[funcDef]) {
|
||||
throw { message: 'Method not found ' + name };
|
||||
}
|
||||
funcDef = index[funcDef];
|
||||
}
|
||||
return new FuncInstance(funcDef, params);
|
||||
}
|
||||
|
||||
export function getFuncDef(name) {
|
||||
return index[name];
|
||||
}
|
||||
|
||||
export function getCategories() {
|
||||
return categories;
|
||||
}
|
||||
185
src/datasource/migrations.ts
Normal file
185
src/datasource/migrations.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import _ from 'lodash';
|
||||
import { ZabbixMetricsQuery } from './types';
|
||||
import * as c from './constants';
|
||||
|
||||
/**
|
||||
* Query format migration.
|
||||
* This module can detect query format version and make migration.
|
||||
*/
|
||||
|
||||
export function isGrafana2target(target) {
|
||||
if (!target.mode || target.mode === 0 || target.mode === 2) {
|
||||
if (
|
||||
(target.hostFilter || target.itemFilter || target.downsampleFunction || (target.host && target.host.host)) &&
|
||||
target.item.filter === undefined &&
|
||||
target.host.filter === undefined
|
||||
) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function migrateFrom2To3version(target: ZabbixMetricsQuery) {
|
||||
target.group.filter = target.group.name === '*' ? '/.*/' : target.group.name;
|
||||
target.host.filter = target.host.name === '*' ? convertToRegex(target.hostFilter) : target.host.name;
|
||||
target.application.filter = target.application.name === '*' ? '' : target.application.name;
|
||||
target.item.filter = target.item.name === 'All' ? convertToRegex(target.itemFilter) : target.item.name;
|
||||
return target;
|
||||
}
|
||||
|
||||
function migratePercentileAgg(target) {
|
||||
if (target.functions) {
|
||||
for (const f of target.functions) {
|
||||
if (f.def && f.def.name === 'percentil') {
|
||||
f.def.name = 'percentile';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function migrateQueryType(target) {
|
||||
if (target.queryType === undefined) {
|
||||
if (target.mode === 'Metrics') {
|
||||
// Explore mode
|
||||
target.queryType = c.MODE_METRICS;
|
||||
} else if (target.mode !== undefined) {
|
||||
target.queryType = target.mode;
|
||||
delete target.mode;
|
||||
}
|
||||
}
|
||||
|
||||
// queryType is a string in query model
|
||||
if (typeof target.queryType === 'number') {
|
||||
target.queryType = (target.queryType as number)?.toString();
|
||||
}
|
||||
}
|
||||
|
||||
function migrateSLA(target) {
|
||||
if (target.queryType === c.MODE_ITSERVICE && !target.slaInterval) {
|
||||
target.slaInterval = 'none';
|
||||
}
|
||||
}
|
||||
|
||||
function migrateProblemSort(target) {
|
||||
if (target.options?.sortProblems === 'priority') {
|
||||
target.options.sortProblems = 'severity';
|
||||
}
|
||||
}
|
||||
|
||||
function migrateApplications(target) {
|
||||
if (!target.itemTag) {
|
||||
target.itemTag = { filter: '' };
|
||||
if (target.application?.filter) {
|
||||
target.itemTag.filter = `Application: ${target.application?.filter}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function migrateSLAProperty(target) {
|
||||
if (target.slaProperty?.property) {
|
||||
target.slaProperty = target.slaProperty?.property;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Entry point for query model migration: applies all per-version migrations
 * to a target in place and returns it. Grafana 2.x-format targets take the
 * dedicated 2→3 conversion path; the remaining migrations are no-ops on
 * already-migrated targets.
 */
export function migrate(target) {
  target.resultFormat = target.resultFormat || 'time_series';
  target = fixTargetGroup(target);
  if (isGrafana2target(target)) {
    return migrateFrom2To3version(target);
  }
  migratePercentileAgg(target);
  migrateQueryType(target);
  migrateSLA(target);
  migrateProblemSort(target);
  migrateApplications(target);
  migrateSLAProperty(target);
  return target;
}
|
||||
|
||||
function fixTargetGroup(target) {
|
||||
if (target.group && Array.isArray(target.group)) {
|
||||
target.group = { filter: '' };
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
function convertToRegex(str) {
|
||||
if (str) {
|
||||
return '/' + str + '/';
|
||||
} else {
|
||||
return '/.*/';
|
||||
}
|
||||
}
|
||||
|
||||
export const DS_CONFIG_SCHEMA = 3;
|
||||
|
||||
/**
 * Migrates datasource jsonData to the current DS_CONFIG_SCHEMA version.
 * Safe to call repeatedly; returns the (possibly mutated) jsonData.
 */
export function migrateDSConfig(jsonData) {
  if (!jsonData) {
    jsonData = {};
  }

  if (!shouldMigrateDSConfig(jsonData)) {
    return jsonData;
  }

  // A missing schema field means the original (v1) config layout.
  const oldVersion = jsonData.schema || 1;
  jsonData.schema = DS_CONFIG_SCHEMA;

  if (oldVersion < 2) {
    // v1 -> v2: flatten the nested dbConnection object into top-level fields.
    const dbConnectionOptions = jsonData.dbConnection || {};
    jsonData.dbConnectionEnable = dbConnectionOptions.enable || false;
    jsonData.dbConnectionDatasourceId = dbConnectionOptions.datasourceId || null;
    delete jsonData.dbConnection;
  }

  if (oldVersion < 3) {
    // v2 -> v3: timeout was stored as a string; normalize to number or null.
    jsonData.timeout = (jsonData.timeout as string) === '' ? null : Number(jsonData.timeout as string);
  }

  return jsonData;
}
|
||||
|
||||
function shouldMigrateDSConfig(jsonData): boolean {
|
||||
if (jsonData.dbConnection && !_.isEmpty(jsonData.dbConnection)) {
|
||||
return true;
|
||||
}
|
||||
if (jsonData.schema && jsonData.schema < DS_CONFIG_SCHEMA) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
const getDefaultAnnotationTarget = (json: any) => {
|
||||
return {
|
||||
group: { filter: json.group ?? '' },
|
||||
host: { filter: json.host ?? '' },
|
||||
application: { filter: json.application ?? '' },
|
||||
trigger: { filter: json.trigger ?? '' },
|
||||
options: {
|
||||
minSeverity: json.minseverity ?? 0,
|
||||
showOkEvents: json.showOkEvents ?? false,
|
||||
hideAcknowledged: json.hideAcknowledged ?? false,
|
||||
showHostname: json.showHostname ?? false,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Normalizes an annotation definition: merges defaults derived from legacy
 * flat fields (getDefaultAnnotationTarget) with any existing target and marks
 * the result as an annotation query. Explicit target values win over derived
 * defaults; the `options` sub-object is merged key-by-key.
 */
export const prepareAnnotation = (json: any) => {
  const defaultTarget = getDefaultAnnotationTarget(json);

  json.target = {
    ...defaultTarget,
    ...json.target,
    fromAnnotations: true,
    options: {
      ...defaultTarget.options!,
      ...json.target?.options,
    },
  };

  return json;
};
|
||||
10
src/datasource/module.ts
Normal file
10
src/datasource/module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { DataSourcePlugin } from '@grafana/data';
|
||||
import { ZabbixDatasource } from './datasource';
|
||||
import { QueryEditor } from './components/QueryEditor';
|
||||
import { ZabbixVariableQueryEditor } from './components/VariableQueryEditor';
|
||||
import { ConfigEditor } from './components/ConfigEditor';
|
||||
|
||||
export const plugin = new DataSourcePlugin(ZabbixDatasource)
|
||||
.setConfigEditor(ConfigEditor)
|
||||
.setQueryEditor(QueryEditor)
|
||||
.setVariableQueryEditor(ZabbixVariableQueryEditor);
|
||||
40
src/datasource/plugin.json
Normal file
40
src/datasource/plugin.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"type": "datasource",
|
||||
"name": "Zabbix",
|
||||
"id": "alexanderzobnin-zabbix-datasource",
|
||||
"metrics": true,
|
||||
"annotations": true,
|
||||
"backend": true,
|
||||
"alerting": true,
|
||||
"executable": "../gpx_zabbix-plugin",
|
||||
"includes": [
|
||||
{
|
||||
"type": "dashboard",
|
||||
"name": "Zabbix System Status",
|
||||
"path": "dashboards/zabbix_system_status.json"
|
||||
},
|
||||
{
|
||||
"type": "dashboard",
|
||||
"name": "Zabbix Template Linux Server",
|
||||
"path": "dashboards/template_linux_server.json"
|
||||
},
|
||||
{
|
||||
"type": "dashboard",
|
||||
"name": "Zabbix Server Dashboard",
|
||||
"path": "dashboards/zabbix_server_dashboard.json"
|
||||
}
|
||||
],
|
||||
"queryOptions": {
|
||||
"maxDataPoints": true
|
||||
},
|
||||
"info": {
|
||||
"author": {
|
||||
"name": "Alexander Zobnin",
|
||||
"url": "https://github.com/alexanderzobnin/grafana-zabbix"
|
||||
},
|
||||
"logos": {
|
||||
"small": "img/icn-zabbix-datasource.svg",
|
||||
"large": "img/icn-zabbix-datasource.svg"
|
||||
}
|
||||
}
|
||||
}
|
||||
204
src/datasource/problemsHandler.ts
Normal file
204
src/datasource/problemsHandler.ts
Normal file
@@ -0,0 +1,204 @@
|
||||
import _ from 'lodash';
|
||||
import * as utils from './utils';
|
||||
import { DataFrame, Field, FieldType, ArrayVector } from '@grafana/data';
|
||||
import { ZBXProblem, ZBXTrigger, ProblemDTO, ZBXEvent } from './types';
|
||||
|
||||
export function joinTriggersWithProblems(problems: ZBXProblem[], triggers: ZBXTrigger[]): ProblemDTO[] {
|
||||
const problemDTOList: ProblemDTO[] = [];
|
||||
|
||||
for (let i = 0; i < problems.length; i++) {
|
||||
const p = problems[i];
|
||||
const triggerId = Number(p.objectid);
|
||||
const t = triggers[triggerId];
|
||||
|
||||
if (t) {
|
||||
const problemDTO: ProblemDTO = {
|
||||
timestamp: Number(p.clock),
|
||||
triggerid: p.objectid,
|
||||
eventid: p.eventid,
|
||||
name: p.name,
|
||||
severity: p.severity,
|
||||
acknowledged: p.acknowledged,
|
||||
acknowledges: p.acknowledges,
|
||||
tags: p.tags,
|
||||
suppressed: p.suppressed,
|
||||
suppression_data: p.suppression_data,
|
||||
description: p.name || t.description,
|
||||
comments: t.comments,
|
||||
value: t.value,
|
||||
groups: t.groups,
|
||||
hosts: t.hosts,
|
||||
items: t.items,
|
||||
alerts: t.alerts,
|
||||
url: t.url,
|
||||
expression: t.expression,
|
||||
correlation_mode: t.correlation_mode,
|
||||
correlation_tag: t.correlation_tag,
|
||||
manual_close: t.manual_close,
|
||||
state: t.state,
|
||||
error: t.error,
|
||||
};
|
||||
|
||||
problemDTOList.push(problemDTO);
|
||||
}
|
||||
}
|
||||
|
||||
return problemDTOList;
|
||||
}
|
||||
|
||||
interface JoinOptions {
|
||||
valueFromEvent?: boolean;
|
||||
}
|
||||
|
||||
export function joinTriggersWithEvents(
|
||||
events: ZBXEvent[],
|
||||
triggers: ZBXTrigger[],
|
||||
options?: JoinOptions
|
||||
): ProblemDTO[] {
|
||||
const { valueFromEvent } = options;
|
||||
const problemDTOList: ProblemDTO[] = [];
|
||||
|
||||
for (let i = 0; i < events.length; i++) {
|
||||
const e = events[i];
|
||||
const triggerId = Number(e.objectid);
|
||||
const t = triggers[triggerId];
|
||||
|
||||
if (t) {
|
||||
const problemDTO: ProblemDTO = {
|
||||
value: valueFromEvent ? e.value : t.value,
|
||||
timestamp: Number(e.clock),
|
||||
triggerid: e.objectid,
|
||||
eventid: e.eventid,
|
||||
name: e.name,
|
||||
severity: e.severity,
|
||||
acknowledged: e.acknowledged,
|
||||
acknowledges: e.acknowledges,
|
||||
tags: e.tags,
|
||||
suppressed: e.suppressed,
|
||||
description: t.description,
|
||||
comments: t.comments,
|
||||
groups: t.groups,
|
||||
hosts: t.hosts,
|
||||
items: t.items,
|
||||
alerts: t.alerts,
|
||||
url: t.url,
|
||||
expression: t.expression,
|
||||
correlation_mode: t.correlation_mode,
|
||||
correlation_tag: t.correlation_tag,
|
||||
manual_close: t.manual_close,
|
||||
state: t.state,
|
||||
error: t.error,
|
||||
};
|
||||
|
||||
problemDTOList.push(problemDTO);
|
||||
}
|
||||
}
|
||||
|
||||
return problemDTOList;
|
||||
}
|
||||
|
||||
export function setMaintenanceStatus(triggers) {
|
||||
_.each(triggers, (trigger) => {
|
||||
const maintenance_status = _.some(trigger.hosts, (host) => host.maintenance_status === '1');
|
||||
trigger.maintenance = maintenance_status;
|
||||
});
|
||||
return triggers;
|
||||
}
|
||||
|
||||
export function setAckButtonStatus(triggers, showAckButton) {
|
||||
_.each(triggers, (trigger) => {
|
||||
trigger.showAckButton = showAckButton;
|
||||
});
|
||||
return triggers;
|
||||
}
|
||||
|
||||
export function addTriggerDataSource(triggers, target) {
|
||||
_.each(triggers, (trigger) => {
|
||||
trigger.datasource = target.datasource;
|
||||
});
|
||||
return triggers;
|
||||
}
|
||||
|
||||
export function addTriggerHostProxy(triggers, proxies) {
|
||||
triggers.forEach((trigger) => {
|
||||
if (trigger.hosts && trigger.hosts.length) {
|
||||
const host = trigger.hosts[0];
|
||||
if (host.proxy_hostid !== '0') {
|
||||
const hostProxy = proxies[host.proxy_hostid];
|
||||
host.proxy = hostProxy ? hostProxy.host : '';
|
||||
}
|
||||
}
|
||||
});
|
||||
return triggers;
|
||||
}
|
||||
|
||||
export function filterTriggersPre(triggerList, replacedTarget) {
|
||||
// Filter triggers by description
|
||||
const triggerFilter = replacedTarget.trigger.filter;
|
||||
if (triggerFilter) {
|
||||
triggerList = filterTriggers(triggerList, triggerFilter);
|
||||
}
|
||||
|
||||
// Filter by maintenance status
|
||||
if (!replacedTarget.options.hostsInMaintenance) {
|
||||
triggerList = _.filter(triggerList, (trigger) => !trigger.maintenance);
|
||||
}
|
||||
|
||||
return triggerList;
|
||||
}
|
||||
|
||||
function filterTriggers(triggers, triggerFilter) {
|
||||
if (utils.isRegex(triggerFilter)) {
|
||||
return _.filter(triggers, (trigger) => {
|
||||
return utils.buildRegex(triggerFilter).test(trigger.description);
|
||||
});
|
||||
} else {
|
||||
return _.filter(triggers, (trigger) => {
|
||||
return trigger.description === triggerFilter;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Sorts problems according to the target's sort option:
 * 'severity'   — most severe first, ties broken by highest eventid;
 * 'lastchange' — newest first, ties broken by highest eventid.
 * Any other value leaves the original order. Returns a new array when
 * sorting is applied (lodash orderBy does not mutate).
 */
export function sortProblems(problems: ProblemDTO[], target) {
  if (target.options?.sortProblems === 'severity') {
    problems = _.orderBy(problems, ['severity', 'eventid'], ['desc', 'desc']);
  } else if (target.options?.sortProblems === 'lastchange') {
    problems = _.orderBy(problems, ['timestamp', 'eventid'], ['desc', 'desc']);
  }
  return problems;
}
|
||||
|
||||
export function toDataFrame(problems: any[]): DataFrame {
|
||||
const problemsField: Field<any> = {
|
||||
name: 'Problems',
|
||||
type: FieldType.other,
|
||||
values: new ArrayVector(problems),
|
||||
config: {
|
||||
custom: {
|
||||
type: 'problems',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const response: DataFrame = {
|
||||
name: 'problems',
|
||||
fields: [problemsField],
|
||||
length: problems.length,
|
||||
};
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
const problemsHandler = {
|
||||
addTriggerDataSource,
|
||||
addTriggerHostProxy,
|
||||
setMaintenanceStatus,
|
||||
setAckButtonStatus,
|
||||
filterTriggersPre,
|
||||
sortProblems,
|
||||
toDataFrame,
|
||||
joinTriggersWithProblems,
|
||||
joinTriggersWithEvents,
|
||||
};
|
||||
|
||||
export default problemsHandler;
|
||||
35
src/datasource/query_help.md
Normal file
35
src/datasource/query_help.md
Normal file
@@ -0,0 +1,35 @@
|
||||
#### Max data points
|
||||
Override max data points, automatically set to graph width in pixels. Grafana-Zabbix plugin uses maxDataPoints parameter to consolidate the real number of values down to this number. If there are more real values, then by default they will be consolidated using averages. This could hide real peaks and max values in your series. Point consolidation will affect series legend values (min,max,total,current).
|
||||
|
||||
#### Query Mode
|
||||
##### Metrics
|
||||
Data from numeric items.
|
||||
|
||||
##### Text
|
||||
Data from items with `Character`, `Text` or `Log` type.
|
||||
|
||||
##### IT Services
|
||||
Time series representation of IT Services data
|
||||
###### IT service property
|
||||
Zabbix returns the following availability information about IT service:
|
||||
- Status - current status of the IT service
|
||||
- SLA - SLA for the given time interval
|
||||
- OK time - time the service was in OK state, in seconds
|
||||
- Problem time - time the service was in problem state, in seconds
|
||||
- Down time - time the service was in scheduled downtime, in seconds
|
||||
|
||||
##### Item ID
|
||||
Data from items with the specified IDs (comma-separated).
|
||||
This mode is suitable for rendering charts in grafana by passing itemids as url params.
|
||||
1. Create multivalue template variable with type _Custom_, for example, `itemids`.
|
||||
1. Create a graph with the desired parameters and use `$itemids` in the _Item IDs_ field.
|
||||
1. Save dashboard.
|
||||
1. Click to graph title and select _Share_ -> _Direct link rendered image_.
|
||||
1. Use this URL for graph png image and set `var-itemids` param to desired IDs. Note, for multiple IDs you should pass multiple params, like `&var-itemids=28276&var-itemids=28277`.
|
||||
|
||||
##### Triggers
|
||||
Active triggers count for selected hosts or table data like Zabbix _System status_ panel on the main dashboard.
|
||||
|
||||
#### Documentation links:
|
||||
|
||||
[Grafana-Zabbix Documentation](https://alexanderzobnin.github.io/grafana-zabbix)
|
||||
589
src/datasource/responseHandler.ts
Normal file
589
src/datasource/responseHandler.ts
Normal file
@@ -0,0 +1,589 @@
|
||||
import _ from 'lodash';
|
||||
import TableModel from 'grafana/app/core/table_model';
|
||||
import * as c from './constants';
|
||||
import * as utils from './utils';
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
dataFrameFromJSON,
|
||||
DataFrameJSON,
|
||||
DataQueryResponse,
|
||||
Field,
|
||||
FieldType,
|
||||
getTimeField,
|
||||
MutableDataFrame,
|
||||
MutableField,
|
||||
TIME_SERIES_TIME_FIELD_NAME,
|
||||
TIME_SERIES_VALUE_FIELD_NAME,
|
||||
} from '@grafana/data';
|
||||
import { ZabbixMetricsQuery } from './types';
|
||||
|
||||
/**
 * Convert Zabbix API history.get response to Grafana format
 *
 * @param history raw history.get rows ({ itemid, clock, ns, value, ... })
 * @param items items the history belongs to (used for series names and host lookup)
 * @param addHostName when true and more than one host is selected, prefix the series name with "host: "
 * @param convertPointCallback maps one raw point to a [value, timestamp-ms] datapoint
 * @return {Array} Array of timeseries in Grafana format
 * {
 *    target: "Metric name",
 *    datapoints: [[<value>, <unixtime>], ...]
 * }
 */
function convertHistory(history, items, addHostName, convertPointCallback) {
  /**
   * Response should be in the format:
   * data: [
   *   {
   *     target: "Metric name",
   *     datapoints: [[<value>, <unixtime>], ...]
   *   }, ...
   * ]
   */

  // Group history by itemid
  const grouped_history = _.groupBy(history, 'itemid');
  // Items may share hosts, so flatten all host lists and dedupe by hostid
  const hosts = _.uniqBy(_.flatten(_.map(items, 'hosts')), 'hostid'); //uniqBy is needed to deduplicate

  return _.map(grouped_history, (hist, itemid) => {
    // NOTE(review): assumes every itemid present in history has a matching item;
    // _.find returns undefined otherwise and item.name below would throw — confirm callers guarantee this.
    const item = _.find(items, { itemid: itemid }) as any;
    let alias = item.name;

    // Add scopedVars for using in alias functions
    const scopedVars: any = {
      __zbx_item: { value: item.name },
      __zbx_item_name: { value: item.name },
      __zbx_item_key: { value: item.key_ },
      __zbx_item_interval: { value: item.delay },
    };

    if (_.keys(hosts).length > 0) {
      const host = _.find(hosts, { hostid: item.hostid });
      scopedVars['__zbx_host'] = { value: host.host };
      scopedVars['__zbx_host_name'] = { value: host.name };

      // Only add host when multiple hosts selected
      if (_.keys(hosts).length > 1 && addHostName) {
        alias = host.name + ': ' + alias;
      }
    }

    return {
      target: alias,
      datapoints: _.map(hist, convertPointCallback),
      scopedVars,
      item,
    };
  });
}
|
||||
|
||||
/**
 * Convert a legacy timeseries ({ target, datapoints, scopedVars, item }) produced by
 * convertHistory() into a Grafana MutableDataFrame with one time and one value field.
 *
 * NOTE(review): "timeFiled"/"valueFiled" are typos for "Field"; kept unchanged here
 * to keep this a documentation-only change.
 *
 * @param timeseries legacy series ({ target, datapoints, scopedVars, item })
 * @param target the query the series belongs to (provides refId and options)
 * @param valueMappings Zabbix value mappings, applied when target.options.useZabbixValueMapping is set
 * @param fieldType value field type; defaults to number, pass FieldType.string for text data
 */
export function seriesToDataFrame(
  timeseries,
  target: ZabbixMetricsQuery,
  valueMappings?: any[],
  fieldType?: FieldType
): MutableDataFrame {
  const { datapoints, scopedVars, target: seriesName, item } = timeseries;

  const timeFiled: Field = {
    name: TIME_SERIES_TIME_FIELD_NAME,
    type: FieldType.time,
    config: {
      custom: {},
    },
    values: new ArrayVector<number>(datapoints.map((p) => p[c.DATAPOINT_TS])),
  };

  // Text items keep string values, everything else is numeric
  let values: ArrayVector<number> | ArrayVector<string>;
  if (fieldType === FieldType.string) {
    values = new ArrayVector<string>(datapoints.map((p) => p[c.DATAPOINT_VALUE]));
  } else {
    values = new ArrayVector<number>(datapoints.map((p) => p[c.DATAPOINT_VALUE]));
  }

  const valueFiled: Field = {
    name: TIME_SERIES_VALUE_FIELD_NAME,
    type: fieldType ?? FieldType.number,
    labels: {},
    config: {
      displayNameFromDS: seriesName,
      custom: {},
    },
    values,
  };

  if (scopedVars) {
    // Item update interval is read later by alignFrames() to synthesize leading gap points
    timeFiled.config.custom = {
      itemInterval: scopedVars['__zbx_item_interval']?.value,
    };

    valueFiled.labels = {
      host: scopedVars['__zbx_host_name']?.value,
      item: scopedVars['__zbx_item']?.value,
      item_key: scopedVars['__zbx_item_key']?.value,
    };

    valueFiled.config.custom = {
      itemInterval: scopedVars['__zbx_item_interval']?.value,
    };
  }

  if (item) {
    // Try to use unit configured in Zabbix
    const unit = utils.convertZabbixUnit(item.units);
    if (unit) {
      // NOTE(review): debug logging left in — consider removing or routing through a logger.
      console.log(`Datasource: unit detected: ${unit} (${item.units})`);
      valueFiled.config.unit = unit;

      // Percent units get fixed axis bounds
      if (unit === 'percent') {
        valueFiled.config.min = 0;
        valueFiled.config.max = 100;
      }
    }

    // Try to use value mapping from Zabbix
    const mappings = utils.getValueMapping(item, valueMappings);
    if (mappings && target.options?.useZabbixValueMapping) {
      console.log(`Datasource: using Zabbix value mapping`);
      valueFiled.config.mappings = mappings;
    }
  }

  const fields: Field[] = [timeFiled, valueFiled];

  const frame: DataFrame = {
    name: seriesName,
    refId: target.refId,
    fields,
    length: datapoints.length,
  };

  const mutableFrame = new MutableDataFrame(frame);
  return mutableFrame;
}
|
||||
|
||||
// Converts DataResponse to the format which backend works with (for data processing)
// Each non-time field becomes one series: { ts: [{time, value}], meta: { name, item, interval } }.
// Timestamps are converted ms -> seconds; intervals ms -> nanoseconds (unmarshalled
// on the backend into time.Duration).
export function dataResponseToTimeSeries(response: DataFrameJSON[], items, request) {
  const series = [];
  if (response.length === 0) {
    return [];
  }

  for (const frameJSON of response) {
    const frame = dataFrameFromJSON(frameJSON);
    const { timeField, timeIndex } = getTimeField(frame);
    for (let i = 0; i < frame.fields.length; i++) {
      const field = frame.fields[i];
      // Skip the time field itself and empty value fields
      if (i === timeIndex || !field.values || !field.values.length) {
        continue;
      }

      // Collect non-null points, converting timestamps from ms to seconds
      const s = [];
      for (let j = 0; j < field.values.length; j++) {
        const v = field.values.get(j);
        if (v !== null) {
          s.push({ time: timeField.values.get(j) / 1000, value: v });
        }
      }

      // The field name carries the Zabbix item id
      const itemid = field.name;
      // NOTE(review): assumes a matching item always exists; item.delay below would throw otherwise — confirm.
      const item = _.find(items, { itemid: itemid });

      // Convert interval to nanoseconds in order to unmarshall it on the backend to time.Duration
      let interval = request.intervalMs * 1000000;
      const itemInterval = utils.parseItemInterval(item.delay) * 1000000;
      // Provided interval is using for the data alignment, so it shouldn't be less than item update interval
      interval = Math.max(interval, itemInterval);
      if (interval === 0) {
        interval = null;
      }

      // Prefix the series name with the host name when known
      let seriesName = item.name;
      if (item.hosts?.length > 0) {
        seriesName = `${item.hosts[0].name}: ${seriesName}`;
      }

      const timeSeriesData = {
        ts: s,
        meta: {
          name: seriesName,
          item,
          interval,
        },
      };

      series.push(timeSeriesData);
    }
  }

  return series;
}
|
||||
|
||||
// Get units from Zabbix
// Maps the raw Zabbix unit string (carried in field.config.custom.units) to a
// Grafana unit and applies it in place; percent units also get fixed 0-100 bounds.
// Returns the same response object (frames are mutated, not copied).
export function convertZabbixUnits(response: DataQueryResponse) {
  for (let i = 0; i < response.data.length; i++) {
    const frame: DataFrame = response.data[i];
    for (const field of frame.fields) {
      const zabbixUnits = field.config.custom?.units;
      if (zabbixUnits) {
        const unit = utils.convertZabbixUnit(zabbixUnits);
        if (unit) {
          field.config.unit = unit;

          if (unit === 'percent') {
            field.config.min = 0;
            field.config.max = 100;
          }
        }
      }
    }
  }
  return response;
}
|
||||
|
||||
// Convert an IT services SLA response (legacy {target, datapoints} series) into
// the backend time series format. Timestamps are converted from ms to seconds.
export function itServiceResponseToTimeSeries(response: any, interval) {
  const series = [];
  if (response.length === 0) {
    return [];
  }

  for (const s of response) {
    const ts = [];

    if (!s.datapoints) {
      continue;
    }

    // datapoints are [value, timestamp-ms] pairs
    const dp = s.datapoints;
    for (let i = 0; i < dp.length; i++) {
      ts.push({ time: dp[i][1] / 1000, value: dp[i][0] });
    }

    // Convert interval to nanoseconds in order to unmarshall it on the backend to time.Duration
    let intervalNS = utils.parseItemInterval(interval) * 1000000;
    if (intervalNS === 0) {
      intervalNS = null;
    }

    // NOTE(review): intervalNS is computed but never used — meta.interval is hardcoded
    // to null below. Looks like a bug (or dead code); confirm whether meta.interval
    // should be intervalNS.
    const timeSeriesData = {
      ts: ts,
      meta: {
        name: s.target,
        interval: null,
        item: {},
      },
    };

    series.push(timeSeriesData);
  }

  return series;
}
|
||||
|
||||
// Returns true when all frames share the same leading timestamps and can therefore
// be merged into a single wide frame by convertToWide().
export function isConvertibleToWide(data: DataFrame[]): boolean {
  // Nothing to merge for 0 or 1 frames
  if (!data || data.length < 2) {
    return false;
  }

  const first = data[0].fields.find((f) => f.type === FieldType.time);
  if (!first) {
    return false;
  }

  for (let i = 1; i < data.length; i++) {
    // NOTE(review): assumes every frame has a time field; timeField would be
    // undefined otherwise and .values would throw — confirm.
    const timeField = data[i].fields.find((f) => f.type === FieldType.time);

    // NOTE(review): Math.min(data.length, 2) always evaluates to 2 here (length >= 2),
    // so only the first two points are compared; presumably intentional sampling,
    // but the bound by frame count looks odd — confirm.
    for (let j = 0; j < Math.min(data.length, 2); j++) {
      if (timeField.values.get(j) !== first.values.get(j)) {
        return false;
      }
    }
  }

  return true;
}
|
||||
|
||||
// Pad frames that start later than the earliest frame with leading null points so
// all frames begin at the same timestamp. Mutates the given frames in place and
// returns the same array.
export function alignFrames(data: MutableDataFrame[]): MutableDataFrame[] {
  if (!data || data.length === 0) {
    return data;
  }

  // Get oldest time stamp for all frames
  let minTimestamp = data[0].fields.find((f) => f.name === TIME_SERIES_TIME_FIELD_NAME).values.get(0);
  for (let i = 0; i < data.length; i++) {
    const timeField = data[i].fields.find((f) => f.name === TIME_SERIES_TIME_FIELD_NAME);
    const firstTs = timeField.values.get(0);
    if (firstTs < minTimestamp) {
      minTimestamp = firstTs;
    }
  }

  for (let i = 0; i < data.length; i++) {
    const frame = data[i];
    const timeField = frame.fields.find((f) => f.name === TIME_SERIES_TIME_FIELD_NAME);
    const valueField = frame.fields.find((f) => f.name === TIME_SERIES_VALUE_FIELD_NAME);
    const firstTs = timeField.values.get(0);

    if (firstTs > minTimestamp) {
      console.log('Data frames: adding missing points');
      let timestamps = timeField.values.toArray();
      let values = valueField.values.toArray();
      const missingTimestamps = [];
      const missingValues = [];
      // Synthesize one null point per item update interval between the global start
      // and this frame's first timestamp.
      // NOTE(review): itemInterval is set by seriesToDataFrame(); if it is undefined,
      // j += NaN ends this loop after a single iteration — confirm it is always set.
      const frameInterval: number = timeField.config.custom?.itemInterval;
      for (let j = minTimestamp; j < firstTs; j += frameInterval) {
        missingTimestamps.push(j);
        missingValues.push(null);
      }

      // Prepend the synthesized gap, then swap in the new vectors
      timestamps = missingTimestamps.concat(timestamps);
      values = missingValues.concat(values);
      timeField.values = new ArrayVector(timestamps);
      valueField.values = new ArrayVector(values);
    }
  }

  return data;
}
|
||||
|
||||
// Merge multiple single-series frames into one wide frame that shares the time
// field of the longest frame. Mutates value fields in place (each is renamed to
// its frame's name so series stay distinguishable in the wide frame).
export function convertToWide(data: MutableDataFrame[]): DataFrame[] {
  // Use the time axis of the frame with the most points
  const maxLengthIndex = getLongestFrame(data);
  const timeField = data[maxLengthIndex].fields.find((f) => f.type === FieldType.time);
  if (!timeField) {
    return [];
  }

  const fields: MutableField[] = [timeField];

  for (let i = 0; i < data.length; i++) {
    const valueField = data[i].fields.find((f) => f.name === TIME_SERIES_VALUE_FIELD_NAME);
    if (!valueField) {
      continue;
    }

    valueField.name = data[i].name;

    // Add null value to the end if series is shifted by 1 time frame
    if (timeField.values.length - valueField.values.length === 1) {
      valueField.values.add(null);
    }
    fields.push(valueField);
  }

  const frame: DataFrame = {
    name: 'wide',
    fields,
    length: timeField.values.length,
  };

  return [frame];
}
|
||||
|
||||
function getLongestFrame(data: MutableDataFrame[]): number {
|
||||
let maxLengthIndex = 0;
|
||||
let maxLength = 0;
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
const timeField = data[i].fields.find((f) => f.type === FieldType.time);
|
||||
if (timeField.values.length > maxLength) {
|
||||
maxLength = timeField.values.length;
|
||||
maxLengthIndex = i;
|
||||
}
|
||||
}
|
||||
|
||||
return maxLengthIndex;
|
||||
}
|
||||
|
||||
// Sort each series' datapoints by timestamp ascending, in place.
// Trend data may arrive out of order (issue #202); returns the same array.
function sortTimeseries(timeseries) {
  // Sort trend data, issue #202
  _.forEach(timeseries, (series) => {
    series.datapoints = _.sortBy(series.datapoints, (point) => point[c.DATAPOINT_TS]);
  });
  return timeseries;
}
|
||||
|
||||
// Convert raw history rows (numeric items) to Grafana timeseries.
function handleHistory(history, items, addHostName = true) {
  return convertHistory(history, items, addHostName, convertHistoryPoint);
}
|
||||
|
||||
function handleTrends(history, items, valueType, addHostName = true) {
|
||||
const convertPointCallback = _.partial(convertTrendPoint, valueType);
|
||||
return convertHistory(history, items, addHostName, convertPointCallback);
|
||||
}
|
||||
|
||||
function handleText(history, items, target, addHostName = true) {
|
||||
const convertTextCallback = _.partial(convertText, target);
|
||||
return convertHistory(history, items, addHostName, convertTextCallback);
|
||||
}
|
||||
|
||||
// Render the latest value of each item as a table row: [Host, Item, Key, Last value].
// Honors target.options.skipEmptyValues and the target.textFilter regex extractor.
function handleHistoryAsTable(history, items, target) {
  const table: any = new TableModel();
  table.addColumn({ text: 'Host' });
  table.addColumn({ text: 'Item' });
  table.addColumn({ text: 'Key' });
  table.addColumn({ text: 'Last value' });

  const grouped_history = _.groupBy(history, 'itemid');
  _.each(items, (item) => {
    const itemHistory = grouped_history[item.itemid] || [];
    // NOTE(review): assumes history is ordered, so the last row is the most recent — confirm.
    const lastPoint = _.last(itemHistory);
    let lastValue = lastPoint ? lastPoint.value : null;

    if (target.options.skipEmptyValues && (!lastValue || lastValue === '')) {
      return;
    }

    // Regex-based extractor
    if (target.textFilter) {
      lastValue = extractText(lastValue, target.textFilter, target.useCaptureGroups);
    }

    let host: any = _.first(item.hosts);
    host = host ? host.name : '';

    table.rows.push([host, item.name, item.key_, lastValue]);
  });

  return table;
}
|
||||
|
||||
function convertText(target, point) {
|
||||
let value = point.value;
|
||||
|
||||
// Regex-based extractor
|
||||
if (target.textFilter) {
|
||||
value = extractText(point.value, target.textFilter, target.useCaptureGroups);
|
||||
}
|
||||
|
||||
return [value, point.clock * 1000 + Math.round(point.ns / 1000000)];
|
||||
}
|
||||
|
||||
function extractText(str, pattern, useCaptureGroups) {
|
||||
const extractPattern = new RegExp(pattern);
|
||||
const extractedValue = extractPattern.exec(str);
|
||||
if (extractedValue) {
|
||||
if (useCaptureGroups) {
|
||||
return extractedValue[1];
|
||||
} else {
|
||||
return extractedValue[0];
|
||||
}
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
function handleSLAResponse(itservice, slaProperty, slaObject) {
|
||||
const targetSLA = slaObject[itservice.serviceid].sla;
|
||||
if (slaProperty === 'status') {
|
||||
const targetStatus = parseInt(slaObject[itservice.serviceid].status, 10);
|
||||
return {
|
||||
target: itservice.name + ' ' + slaProperty,
|
||||
datapoints: [[targetStatus, targetSLA[0].to * 1000]],
|
||||
};
|
||||
} else {
|
||||
let i;
|
||||
const slaArr = [];
|
||||
for (i = 0; i < targetSLA.length; i++) {
|
||||
if (i === 0) {
|
||||
slaArr.push([targetSLA[i][slaProperty], targetSLA[i].from * 1000]);
|
||||
}
|
||||
slaArr.push([targetSLA[i][slaProperty], targetSLA[i].to * 1000]);
|
||||
}
|
||||
return {
|
||||
target: itservice.name + ' ' + slaProperty,
|
||||
datapoints: slaArr,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// When triggers is a count (non-array), return a single-point "triggers count" series
// stamped at the end of the time range. Otherwise build a table with one row per
// selected host group and one column per severity (highest severity first).
function handleTriggersResponse(triggers, groups, timeRange) {
  if (!_.isArray(triggers)) {
    let triggersCount = null;
    try {
      triggersCount = Number(triggers);
    } catch (err) {
      // NOTE(review): Number() does not throw (it yields NaN), so this catch is effectively dead.
      console.log('Error when handling triggers count: ', err);
    }
    return {
      target: 'triggers count',
      datapoints: [[triggersCount, timeRange[1] * 1000]],
    };
  } else {
    const stats = getTriggerStats(triggers);
    const groupNames = _.map(groups, 'name');
    const table: any = new TableModel();
    table.addColumn({ text: 'Host group' });
    // Severity columns, highest severity first
    _.each(_.orderBy(c.TRIGGER_SEVERITY, ['val'], ['desc']), (severity) => {
      table.addColumn({ text: severity.text });
    });
    _.each(stats, (severity_stats, group) => {
      // Only include groups the user actually selected
      if (_.includes(groupNames, group)) {
        // Order counts by severity key descending so they line up with the columns
        let row = _.map(
          _.orderBy(_.toPairs(severity_stats), (s) => s[0], ['desc']),
          (s) => s[1]
        );
        row = _.concat([group], ...row);
        table.rows.push(row);
      }
    });
    return table;
  }
}
|
||||
|
||||
function getTriggerStats(triggers) {
|
||||
const groups = _.uniq(_.flattenDeep(_.map(triggers, (trigger) => _.map(trigger.groups, 'name'))));
|
||||
// let severity = _.map(c.TRIGGER_SEVERITY, 'text');
|
||||
const stats = {};
|
||||
_.each(groups, (group) => {
|
||||
stats[group] = { 0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0 }; // severity:count
|
||||
});
|
||||
_.each(triggers, (trigger) => {
|
||||
_.each(trigger.groups, (group) => {
|
||||
stats[group.name][trigger.priority]++;
|
||||
});
|
||||
});
|
||||
return stats;
|
||||
}
|
||||
|
||||
function convertHistoryPoint(point) {
|
||||
// Value must be a number for properly work
|
||||
return [Number(point.value), point.clock * 1000 + Math.round(point.ns / 1000000)];
|
||||
}
|
||||
|
||||
function convertTrendPoint(valueType, point) {
|
||||
let value;
|
||||
switch (valueType) {
|
||||
case 'min':
|
||||
value = point.value_min;
|
||||
break;
|
||||
case 'max':
|
||||
value = point.value_max;
|
||||
break;
|
||||
case 'avg':
|
||||
value = point.value_avg;
|
||||
break;
|
||||
case 'sum':
|
||||
value = point.value_avg * point.num;
|
||||
break;
|
||||
case 'count':
|
||||
value = point.num;
|
||||
break;
|
||||
default:
|
||||
value = point.value_avg;
|
||||
}
|
||||
|
||||
return [Number(value), point.clock * 1000];
|
||||
}
|
||||
|
||||
// Aggregate default export so consumers can import the whole response-handler API
// as a single object (legacy import style).
export default {
  handleHistory,
  convertHistory,
  handleTrends,
  handleText,
  handleHistoryAsTable,
  handleSLAResponse,
  handleTriggersResponse,
  sortTimeseries,
  seriesToDataFrame,
  dataResponseToTimeSeries,
  itServiceResponseToTimeSeries,
  isConvertibleToWide,
  convertToWide,
  alignFrames,
  convertZabbixUnits,
};
|
||||
312
src/datasource/specs/datasource.spec.ts
Normal file
312
src/datasource/specs/datasource.spec.ts
Normal file
@@ -0,0 +1,312 @@
|
||||
import _ from 'lodash';
|
||||
import { templateSrvMock, datasourceSrvMock } from '../../test-setup/mocks';
|
||||
import { replaceTemplateVars, ZabbixDatasource, zabbixTemplateFormat } from '../datasource';
|
||||
import { dateMath } from '@grafana/data';
|
||||
|
||||
// Mock @grafana/runtime (not installed in the test env, hence { virtual: true }):
// backend requests resolve to empty results and template replacement is the identity.
jest.mock(
  '@grafana/runtime',
  () => ({
    getBackendSrv: () => ({
      datasourceRequest: jest.fn().mockResolvedValue({ data: { result: '' } }),
      fetch: () => ({
        toPromise: () => jest.fn().mockResolvedValue({ data: { result: '' } }),
      }),
    }),
    getTemplateSrv: () => ({
      replace: jest.fn().mockImplementation((query) => query),
    }),
  }),
  { virtual: true }
);

// Stub out the React annotation editor — irrelevant to the datasource logic under test.
jest.mock('../components/AnnotationQueryEditor', () => ({
  AnnotationQueryEditor: () => {},
}));
|
||||
|
||||
describe('ZabbixDatasource', () => {
|
||||
let ctx: any = {};
|
||||
|
||||
beforeEach(() => {
  // Minimal instance settings: direct API mode (no DB connection), trends enabled
  ctx.instanceSettings = {
    jsonData: {
      alerting: false,
      username: 'zabbix',
      password: 'zabbix',
      trends: true,
      trendsFrom: '14d',
      trendsRange: '7d',
      dbConnectionEnable: false,
    },
  };

  // Default query: a single target with empty filters over the last hour
  ctx.options = {
    targets: [
      {
        group: { filter: '' },
        host: { filter: '' },
        application: { filter: '' },
        item: { filter: '' },
      },
    ],
    range: {
      from: dateMath.parse('now-1h'),
      to: dateMath.parse('now'),
    },
  };

  ctx.datasourceSrv = datasourceSrvMock;

  ctx.ds = new ZabbixDatasource(ctx.instanceSettings);
  ctx.ds.templateSrv = templateSrvMock;
});
|
||||
|
||||
describe('When querying text data', () => {
  beforeEach(() => {
    // Bypass variable replacement and stub the Zabbix API responses
    ctx.ds.replaceTemplateVars = (str) => str;
    ctx.ds.zabbix.zabbixAPI.getHistory = jest.fn().mockReturnValue(
      Promise.resolve([
        { clock: '1500010200', itemid: '10100', ns: '900111000', value: 'Linux first' },
        { clock: '1500010300', itemid: '10100', ns: '900111000', value: 'Linux 2nd' },
        { clock: '1500010400', itemid: '10100', ns: '900111000', value: 'Linux last' },
      ])
    );

    ctx.ds.zabbix.getItemsFromTarget = jest.fn().mockReturnValue(
      Promise.resolve([
        {
          hosts: [{ hostid: '10001', name: 'Zabbix server' }],
          itemid: '10100',
          name: 'System information',
          key_: 'system.uname',
        },
      ])
    );

    // Text query (queryType: 2) rendered as a table
    ctx.options.targets = [
      {
        group: { filter: '' },
        host: { filter: 'Zabbix server' },
        application: { filter: '' },
        item: { filter: 'System information' },
        textFilter: '',
        useCaptureGroups: true,
        queryType: 2,
        resultFormat: 'table',
        options: {
          skipEmptyValues: false,
        },
      },
    ];
  });

  it('should return data in table format', (done) => {
    ctx.ds.query(ctx.options).then((result) => {
      expect(result.data.length).toBe(1);

      let tableData = result.data[0];
      expect(tableData.columns).toEqual([
        { text: 'Host' },
        { text: 'Item' },
        { text: 'Key' },
        { text: 'Last value' },
      ]);
      expect(tableData.rows).toEqual([['Zabbix server', 'System information', 'system.uname', 'Linux last']]);
      done();
    });
  });

  it('should extract value if regex with capture group is used', (done) => {
    ctx.options.targets[0].textFilter = 'Linux (.*)';
    ctx.ds.query(ctx.options).then((result) => {
      let tableData = result.data[0];
      expect(tableData.rows[0][3]).toEqual('last');
      done();
    });
  });

  it('should skip item when last value is empty', () => {
    ctx.ds.zabbix.getItemsFromTarget = jest.fn().mockReturnValue(
      Promise.resolve([
        {
          hosts: [{ hostid: '10001', name: 'Zabbix server' }],
          itemid: '10100',
          name: 'System information',
          key_: 'system.uname',
        },
        {
          hosts: [{ hostid: '10002', name: 'Server02' }],
          itemid: '90109',
          name: 'System information',
          key_: 'system.uname',
        },
      ])
    );

    ctx.options.targets[0].options.skipEmptyValues = true;
    ctx.ds.zabbix.getHistory = jest.fn().mockReturnValue(
      Promise.resolve([
        { clock: '1500010200', itemid: '10100', ns: '900111000', value: 'Linux first' },
        { clock: '1500010300', itemid: '10100', ns: '900111000', value: 'Linux 2nd' },
        { clock: '1500010400', itemid: '10100', ns: '900111000', value: 'Linux last' },
        { clock: '1500010200', itemid: '90109', ns: '900111000', value: 'Non empty value' },
        { clock: '1500010500', itemid: '90109', ns: '900111000', value: '' },
      ])
    );
    // Item 90109's last value is empty, so only the first item should produce a row
    return ctx.ds.query(ctx.options).then((result) => {
      let tableData = result.data[0];
      expect(tableData.rows.length).toBe(1);
      expect(tableData.rows[0][3]).toEqual('Linux last');
    });
  });
});
|
||||
|
||||
describe('When replacing template variables', () => {
  // Replace `target` through a stubbed templateSrv that always resolves the variable
  // to varValue, then assert on the Zabbix-formatted result.
  function testReplacingVariable(target, varValue, expectedResult, done) {
    ctx.ds.replaceTemplateVars = _.partial(replaceTemplateVars, {
      replace: jest.fn((target) => zabbixTemplateFormat(varValue)),
    });

    let result = ctx.ds.replaceTemplateVars(target);
    expect(result).toBe(expectedResult);
    done();
  }

  /*
   * Alphanumerics, spaces, dots, dashes and underscores
   * are allowed in Zabbix host name.
   * 'AaBbCc0123 .-_'
   */
  it('should return properly escaped regex', (done) => {
    let target = '$host';
    let template_var_value = 'AaBbCc0123 .-_';
    let expected_result = '/^AaBbCc0123 \\.-_$/';

    testReplacingVariable(target, template_var_value, expected_result, done);
  });

  /*
   * Single-value variable
   * $host = backend01
   * $host => /^backend01$/
   */
  it('should return proper regex for single value', (done) => {
    let target = '$host';
    let template_var_value = 'backend01';
    let expected_result = '/^backend01$/';

    testReplacingVariable(target, template_var_value, expected_result, done);
  });

  /*
   * Multi-value variable
   * $host = [backend01, backend02]
   * $host => /^(backend01|backend02)$/
   */
  it('should return proper regex for multi-value', (done) => {
    let target = '$host';
    let template_var_value = ['backend01', 'backend02'];
    let expected_result = '/^(backend01|backend02)$/';

    testReplacingVariable(target, template_var_value, expected_result, done);
  });
});
|
||||
|
||||
describe('When invoking metricFindQuery() with legacy query', () => {
  beforeEach(() => {
    // Bypass variable replacement and spy on the zabbix lookup methods — each test
    // asserts which lookup is called and with which parsed query parts.
    ctx.ds.replaceTemplateVars = (str) => str;
    ctx.ds.zabbix = {
      getGroups: jest.fn().mockReturnValue(Promise.resolve([])),
      getHosts: jest.fn().mockReturnValue(Promise.resolve([])),
      getApps: jest.fn().mockReturnValue(Promise.resolve([])),
      getItems: jest.fn().mockReturnValue(Promise.resolve([])),
    };
  });

  it('should return groups', (done) => {
    // One query part -> group lookup; '*' becomes the match-all regex
    const tests = [
      { query: '*', expect: '/.*/' },
      { query: 'Backend', expect: 'Backend' },
      { query: 'Back*', expect: 'Back*' },
    ];

    for (const test of tests) {
      ctx.ds.metricFindQuery(test.query);
      expect(ctx.ds.zabbix.getGroups).toBeCalledWith(test.expect);
      ctx.ds.zabbix.getGroups.mockClear();
    }
    done();
  });

  it('should return empty list for empty query', (done) => {
    ctx.ds.metricFindQuery('').then((result) => {
      expect(ctx.ds.zabbix.getGroups).toBeCalledTimes(0);
      ctx.ds.zabbix.getGroups.mockClear();

      expect(result).toEqual([]);
      done();
    });
  });

  it('should return hosts', (done) => {
    // Two dot-separated parts -> host lookup with [group, host] filters
    const tests = [
      { query: '*.*', expect: ['/.*/', '/.*/'] },
      { query: '.', expect: ['', ''] },
      { query: 'Backend.*', expect: ['Backend', '/.*/'] },
      { query: 'Back*.', expect: ['Back*', ''] },
    ];

    for (const test of tests) {
      ctx.ds.metricFindQuery(test.query);
      expect(ctx.ds.zabbix.getHosts).toBeCalledWith(test.expect[0], test.expect[1]);
      ctx.ds.zabbix.getHosts.mockClear();
    }
    done();
  });

  it('should return applications', (done) => {
    // Three parts -> application lookup with [group, host, app] filters
    const tests = [
      { query: '*.*.*', expect: ['/.*/', '/.*/', '/.*/'] },
      { query: '.*.', expect: ['', '/.*/', ''] },
      { query: 'Backend.backend01.*', expect: ['Backend', 'backend01', '/.*/'] },
      { query: 'Back*.*.', expect: ['Back*', '/.*/', ''] },
    ];

    for (const test of tests) {
      ctx.ds.metricFindQuery(test.query);
      expect(ctx.ds.zabbix.getApps).toBeCalledWith(test.expect[0], test.expect[1], test.expect[2]);
      ctx.ds.zabbix.getApps.mockClear();
    }
    done();
  });

  it('should return items', (done) => {
    // Four parts -> item lookup: [group, host, app, itemTag, item]
    const tests = [
      { query: '*.*.*.*', expect: ['/.*/', '/.*/', '', null, '/.*/'] },
      { query: '.*.*.*', expect: ['', '/.*/', '', null, '/.*/'] },
      { query: 'Backend.backend01.*.*', expect: ['Backend', 'backend01', '', null, '/.*/'] },
      { query: 'Back*.*.cpu.*', expect: ['Back*', '/.*/', 'cpu', null, '/.*/'] },
    ];

    for (const test of tests) {
      ctx.ds.metricFindQuery(test.query);
      expect(ctx.ds.zabbix.getItems).toBeCalledWith(
        test.expect[0],
        test.expect[1],
        test.expect[2],
        test.expect[3],
        test.expect[4]
      );
      ctx.ds.zabbix.getItems.mockClear();
    }
    done();
  });

  it('should invoke method with proper arguments', (done) => {
    let query = '*.*';

    ctx.ds.metricFindQuery(query);
    expect(ctx.ds.zabbix.getHosts).toBeCalledWith('/.*/', '/.*/');
    done();
  });
});
|
||||
});
|
||||
53
src/datasource/specs/dbConnector.test.ts
Normal file
53
src/datasource/specs/dbConnector.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { DBConnector } from '../zabbix/connectors/dbConnector';
|
||||
|
||||
// Shared spies for the mocked datasource registry: loading always resolves to the
// same stub datasource, and the registry lists exactly that one datasource.
const loadDatasourceMock = jest.fn().mockResolvedValue({ id: 42, name: 'foo', meta: {} });
const getAllMock = jest.fn().mockReturnValue([{ id: 42, name: 'foo', meta: {} }]);

// DBConnector resolves datasources through @grafana/runtime's getDataSourceSrv()
jest.mock('@grafana/runtime', () => ({
  getDataSourceSrv: () => ({
    get: loadDatasourceMock,
    getList: getAllMock
  }),
}));
|
||||
|
||||
describe('DBConnector', () => {
  const ctx: any = {};

  describe('When init DB connector', () => {
    beforeEach(() => {
      ctx.options = {
        datasourceId: 42,
        datasourceName: undefined
      };

      // Reset call history so per-test assertions see only their own calls
      loadDatasourceMock.mockClear();
      getAllMock.mockClear();
    });

    it('should try to load datasource by name first', () => {
      const dbConnector = new DBConnector({ datasourceName: 'bar' });
      dbConnector.loadDBDataSource();
      expect(getAllMock).not.toHaveBeenCalled();
      expect(loadDatasourceMock).toHaveBeenCalledWith('bar');
    });

    it('should load datasource by id if name not present', () => {
      // Id is resolved to a name via getList(), then loaded by name
      const dbConnector = new DBConnector({ datasourceId: 42 });
      dbConnector.loadDBDataSource();
      expect(getAllMock).toHaveBeenCalled();
      expect(loadDatasourceMock).toHaveBeenCalledWith('foo');
    });

    it('should throw error if no name and id specified', () => {
      ctx.options = {};
      const dbConnector = new DBConnector(ctx.options);
      return expect(dbConnector.loadDBDataSource()).rejects.toBe('Data Source name should be specified');
    });

    it('should throw error if datasource with given id is not found', () => {
      ctx.options.datasourceId = 45;
      const dbConnector = new DBConnector(ctx.options);
      return expect(dbConnector.loadDBDataSource()).rejects.toBe('Data Source with ID 45 not found');
    });
  });
});
|
||||
139
src/datasource/specs/influxdbConnector.test.ts
Normal file
139
src/datasource/specs/influxdbConnector.test.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { InfluxDBConnector } from '../zabbix/connectors/influxdb/influxdbConnector';
|
||||
import { compactQuery } from '../utils';
|
||||
|
||||
// Mock the datasource registry; the connector only needs get() to resolve its DS
jest.mock('@grafana/runtime', () => ({
  getDataSourceSrv: jest.fn(() => ({
    get: jest.fn().mockResolvedValue({ id: 42, name: 'InfluxDB DS', meta: {} }),
  })),
}));
|
||||
|
||||
describe('InfluxDBConnector', () => {
|
||||
let ctx: any = {};
|
||||
|
||||
beforeEach(() => {
  // Fresh connector per test; query invocation is stubbed so tests only inspect
  // the generated InfluxQL, never hit a backend.
  ctx.options = { datasourceName: 'InfluxDB DS', retentionPolicy: 'longterm' };
  ctx.influxDBConnector = new InfluxDBConnector(ctx.options);
  ctx.influxDBConnector.invokeInfluxDBQuery = jest.fn().mockResolvedValue([]);
  // Range values are epoch seconds; intervalSec drives the GROUP BY time() bucket
  ctx.defaultQueryParams = {
    itemids: ['123', '234'],
    range: { timeFrom: 15000, timeTill: 15100 },
    intervalSec: 5,
    table: 'history',
    aggFunction: 'MAX',
  };
});
|
||||
|
||||
describe('When building InfluxDB query', () => {
  it('should build proper query', () => {
    const { itemids, range, intervalSec, table, aggFunction } = ctx.defaultQueryParams;
    const query = ctx.influxDBConnector.buildHistoryQuery(itemids, table, range, intervalSec, aggFunction);
    const expected = compactQuery(`SELECT MAX("value")
      FROM "history"
      WHERE ("itemid" = '123' OR "itemid" = '234')
        AND "time" >= 15000s
        AND "time" <= 15100s
      GROUP BY time(5s), "itemid" fill(none)
    `);
    expect(query).toBe(expected);
  });

  it('should use MEAN instead of AVG', () => {
    // InfluxQL has no AVG() — the connector must translate avg to MEAN()
    const { itemids, range, intervalSec, table } = ctx.defaultQueryParams;
    const aggFunction = 'avg';
    const query = ctx.influxDBConnector.buildHistoryQuery(itemids, table, range, intervalSec, aggFunction);
    const expected = compactQuery(`SELECT MEAN("value")
      FROM "history"
      WHERE ("itemid" = '123' OR "itemid" = '234')
        AND "time" >= 15000s
        AND "time" <= 15100s
      GROUP BY time(5s), "itemid" fill(none)
    `);
    expect(query).toBe(expected);
  });
});
|
||||
|
||||
describe('When invoking InfluxDB query', () => {
|
||||
it('should query proper table depending on item type', () => {
|
||||
const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
|
||||
const options = { intervalMs: 5000 };
|
||||
const items = [{ itemid: '123', value_type: 3 }];
|
||||
const expectedQuery = compactQuery(`SELECT MEAN("value")
|
||||
FROM "history_uint"
|
||||
WHERE ("itemid" = '123')
|
||||
AND "time" >= 15000s
|
||||
AND "time" <= 15100s
|
||||
GROUP BY time(5s), "itemid" fill(none)
|
||||
`);
|
||||
ctx.influxDBConnector.getHistory(items, timeFrom, timeTill, options);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
|
||||
});
|
||||
|
||||
it('should split query if different item types are used', () => {
|
||||
const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
|
||||
const options = { intervalMs: 5000 };
|
||||
const items = [
|
||||
{ itemid: '123', value_type: 0 },
|
||||
{ itemid: '234', value_type: 3 },
|
||||
];
|
||||
const sharedQueryPart = `AND "time" >= 15000s AND "time" <= 15100s GROUP BY time(5s), "itemid" fill(none)`;
|
||||
const expectedQueryFirst = compactQuery(`SELECT MEAN("value")
|
||||
FROM "history"
|
||||
WHERE ("itemid" = '123') ${sharedQueryPart}
|
||||
`);
|
||||
const expectedQuerySecond = compactQuery(`SELECT MEAN("value")
|
||||
FROM "history_uint"
|
||||
WHERE ("itemid" = '234') ${sharedQueryPart}
|
||||
`);
|
||||
ctx.influxDBConnector.getHistory(items, timeFrom, timeTill, options);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledTimes(2);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenNthCalledWith(1, expectedQueryFirst);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenNthCalledWith(2, expectedQuerySecond);
|
||||
});
|
||||
|
||||
it('should use the same table for trends query if no retention policy set', () => {
|
||||
ctx.influxDBConnector.retentionPolicy = '';
|
||||
const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
|
||||
const options = { intervalMs: 5000 };
|
||||
const items = [{ itemid: '123', value_type: 3 }];
|
||||
const expectedQuery = compactQuery(`SELECT MEAN("value")
|
||||
FROM "history_uint"
|
||||
WHERE ("itemid" = '123')
|
||||
AND "time" >= 15000s
|
||||
AND "time" <= 15100s
|
||||
GROUP BY time(5s), "itemid" fill(none)
|
||||
`);
|
||||
ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
|
||||
});
|
||||
|
||||
it('should use retention policy name for trends query if it was set', () => {
|
||||
const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
|
||||
const options = { intervalMs: 5000 };
|
||||
const items = [{ itemid: '123', value_type: 3 }];
|
||||
const expectedQuery = compactQuery(`SELECT MEAN("value_avg")
|
||||
FROM "longterm"."history_uint"
|
||||
WHERE ("itemid" = '123')
|
||||
AND "time" >= 15000s
|
||||
AND "time" <= 15100s
|
||||
GROUP BY time(5s), "itemid" fill(none)
|
||||
`);
|
||||
ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
|
||||
});
|
||||
|
||||
it('should use proper value column if retention policy set (trends used)', () => {
|
||||
const { timeFrom, timeTill } = ctx.defaultQueryParams.range;
|
||||
const options = { intervalMs: 5000, consolidateBy: 'max' };
|
||||
const items = [{ itemid: '123', value_type: 3 }];
|
||||
const expectedQuery = compactQuery(`SELECT MAX("value_max")
|
||||
FROM "longterm"."history_uint"
|
||||
WHERE ("itemid" = '123')
|
||||
AND "time" >= 15000s
|
||||
AND "time" <= 15100s
|
||||
GROUP BY time(5s), "itemid" fill(none)
|
||||
`);
|
||||
ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
|
||||
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
|
||||
});
|
||||
});
|
||||
});
|
||||
70
src/datasource/specs/migrations.test.ts
Normal file
70
src/datasource/specs/migrations.test.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import _ from 'lodash';
import { migrateDSConfig, DS_CONFIG_SCHEMA } from '../migrations';

// Covers jsonData schema migrations for the datasource config page.
describe('Migrations', () => {
  let ctx: any = {};

  describe('When migrating datasource config', () => {
    beforeEach(() => {
      // Old nested dbConnection format (pre-schema versions of the plugin).
      ctx.jsonData = {
        dbConnection: {
          enable: true,
          datasourceId: 1,
        },
      };
    });

    it('should change direct DB connection setting to flat style', () => {
      migrateDSConfig(ctx.jsonData);
      expect(ctx.jsonData).toMatchObject({
        dbConnectionEnable: true,
        dbConnectionDatasourceId: 1,
        schema: DS_CONFIG_SCHEMA,
      });
    });

    it('should not touch anything if schema is up to date', () => {
      // Unknown (future) options must survive migration untouched.
      ctx.jsonData = {
        futureOptionOne: 'foo',
        futureOptionTwo: 'bar',
        schema: DS_CONFIG_SCHEMA,
      };
      migrateDSConfig(ctx.jsonData);
      expect(ctx.jsonData).toMatchObject({
        futureOptionOne: 'foo',
        futureOptionTwo: 'bar',
        schema: DS_CONFIG_SCHEMA,
      });
      expect(ctx.jsonData.dbConnectionEnable).toBeUndefined();
      expect(ctx.jsonData.dbConnectionDatasourceId).toBeUndefined();
    });
  });

  describe('When handling provisioned datasource config', () => {
    beforeEach(() => {
      // Already-flat config as written by Grafana provisioning files.
      ctx.jsonData = {
        username: 'zabbix',
        password: 'zabbix',
        trends: true,
        trendsFrom: '7d',
        trendsRange: '4d',
        cacheTTL: '1h',
        alerting: true,
        addThresholds: false,
        alertingMinSeverity: 3,
        disableReadOnlyUsersAck: true,
        dbConnectionEnable: true,
        dbConnectionDatasourceName: 'MySQL Zabbix',
        dbConnectionRetentionPolicy: 'one_year',
      };
    });

    it('should not touch anything if schema is up to date', () => {
      const originalConf = _.cloneDeep(ctx.jsonData);
      migrateDSConfig(ctx.jsonData);
      expect(ctx.jsonData).toMatchObject(originalConf);
      expect(ctx.jsonData.dbConnectionEnable).toBe(true);
      expect(ctx.jsonData.dbConnectionDatasourceName).toBeDefined();
    });
  });
});
|
||||
44
src/datasource/specs/timeseries.spec.ts
Normal file
44
src/datasource/specs/timeseries.spec.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
// import _ from 'lodash';
import ts from '../timeseries';

// Smoke tests for the time series processing helpers in timeseries.ts.
describe('timeseries processing functions', () => {

  describe('sumSeries()', () => {
    it('should properly sum series', (done) => {
      let series = [
        [[0, 1], [1, 2], [1, 3]],
        [[2, 1], [3, 2], [4, 3]]
      ];

      let expected = [[2, 1], [4, 2], [5, 3]];

      let result = ts.sumSeries(series);
      expect(result).toEqual(expected);
      done();
    });

    it('should properly sum series with nulls', (done) => {
      // issue #286 — second series is missing its first point; sumSeries is
      // expected to align/interpolate before summing.
      let series = [
        [[1, 1], [1, 2], [1, 3]],
        [[3, 2], [4, 3]]
      ];

      let expected = [[1, 1], [4, 2], [5, 3]];

      let result = ts.sumSeries(series);
      expect(result).toEqual(expected);
      done();
    });

    it('should properly offset metric', (done) => {
      let points = [[1, 1], [-4, 2], [2, 3]];

      let expected = [[101, 1], [96, 2], [102, 3]];

      let result = ts.offset(points, 100);
      expect(result).toEqual(expected);
      done();
    });
  });
});
|
||||
168
src/datasource/specs/utils.spec.ts
Normal file
168
src/datasource/specs/utils.spec.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import _ from 'lodash';
import * as utils from '../utils';

// Tests for item-name expansion and template-query parsing utilities.
describe('Utils', () => {

  // expandItemName() replaces $1..$9 in a Zabbix item name with the
  // corresponding positional parameter from the item key.
  describe('expandItemName()', () => {

    it('should properly expand unquoted params', (done) => {
      let test_cases = [
        {
          name: `CPU $2 time`,
          key: `system.cpu.util[,user,avg1]`,
          expected: "CPU user time"
        },
        {
          name: `CPU $2 time - $3`,
          key: `system.cpu.util[,system,avg1]`,
          expected: "CPU system time - avg1"
        },
        {
          name: `CPU - $1 - $2 - $3`,
          key: `system.cpu.util[,system,avg1]`,
          expected: "CPU - - system - avg1"
        }
      ];

      _.each(test_cases, test_case => {
        let expandedName = utils.expandItemName(test_case.name, test_case.key);
        expect(expandedName).toBe(test_case.expected);
      });
      done();
    });

    it('should properly expand quoted params with commas', (done) => {
      // Quoted params may contain commas; quotes are stripped on expansion.
      let test_cases = [
        {
          name: `CPU $2 time`,
          key: `system.cpu.util["type=user,value=avg",user]`,
          expected: "CPU user time"
        },
        {
          name: `CPU $1 time`,
          key: `system.cpu.util["type=user,value=avg","user"]`,
          expected: "CPU type=user,value=avg time"
        },
        {
          name: `CPU $1 time $3`,
          key: `system.cpu.util["type=user,value=avg",,"user"]`,
          expected: "CPU type=user,value=avg time user"
        },
        {
          name: `CPU $1 $2 $3`,
          key: `system.cpu.util["type=user,value=avg",time,"user"]`,
          expected: "CPU type=user,value=avg time user"
        }
      ];

      _.each(test_cases, test_case => {
        let expandedName = utils.expandItemName(test_case.name, test_case.key);
        expect(expandedName).toBe(test_case.expected);
      });
      done();
    });

    it('should properly expand array params', (done) => {
      // Array params ([a,b]) expand to their comma-joined content.
      let test_cases = [
        {
          name: `CPU $2 - $3 time`,
          key: `system.cpu.util[,[user,system],avg1]`,
          expected: "CPU user,system - avg1 time"
        },
        {
          name: `CPU $2 - $3 time`,
          key: `system.cpu.util[,["user,system",iowait],avg1]`,
          expected: `CPU "user,system",iowait - avg1 time`
        },
        {
          name: `CPU - $2 - $3 - $4`,
          key: `system.cpu.util[,[],["user,system",iowait],avg1]`,
          expected: `CPU - - "user,system",iowait - avg1`
        }
      ];

      _.each(test_cases, test_case => {
        let expandedName = utils.expandItemName(test_case.name, test_case.key);
        expect(expandedName).toBe(test_case.expected);
      });
      done();
    });
  });

  // splitTemplateQuery() parses '{group}{host}...' (and the legacy dotted
  // 'a.b.c.d' format) into its component filters.
  describe('splitTemplateQuery()', () => {

    // Backward compatibility
    it('should properly split query in old format', (done) => {
      let test_cases = [
        {
          query: `/alu/./tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9{2}/`,
          expected: ['/alu/', '/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9{2}/']
        },
        {
          query: `a.b.c.d`,
          expected: ['a', 'b', 'c', 'd']
        }
      ];

      _.each(test_cases, test_case => {
        let splitQuery = utils.splitTemplateQuery(test_case.query);
        expect(splitQuery).toEqual(test_case.expected);
      });
      done();
    });

    it('should properly split query', (done) => {
      let test_cases = [
        {
          query: `{alu}{/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]*/}`,
          expected: ['alu', '/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]*/']
        },
        {
          query: `{alu}{/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]{2}/}`,
          expected: ['alu', '/tw-(nyc|que|brx|dwt|brk)-sta_(\w|\d)*-alu-[0-9]{2}/']
        },
        {
          query: `{a}{b}{c}{d}`,
          expected: ['a', 'b', 'c', 'd']
        },
        {
          query: `{a}{b.c.d}`,
          expected: ['a', 'b.c.d']
        }
      ];

      _.each(test_cases, test_case => {
        let splitQuery = utils.splitTemplateQuery(test_case.query);
        expect(splitQuery).toEqual(test_case.expected);
      });
      done();
    });
  });

  describe('getArrayDepth()', () => {
    it('should calculate proper array depth', () => {
      const test_cases = [
        {
          array: [],
          depth: 1
        },
        {
          array: [1, 2, 3],
          depth: 1
        },
        {
          array: [[1, 2], [3, 4]],
          depth: 2
        },
        {
          array: [[[1, 2], [3, 4]], [[1, 2], [3, 4]]],
          depth: 3
        },
      ];

      for (const test_case of test_cases) {
        expect(utils.getArrayDepth(test_case.array)).toBe(test_case.depth);
      }
    });
  });
});
|
||||
621
src/datasource/timeseries.ts
Normal file
621
src/datasource/timeseries.ts
Normal file
@@ -0,0 +1,621 @@
|
||||
/**
|
||||
* timeseries.js
|
||||
*
|
||||
* This module contains functions for working with time series.
|
||||
*
|
||||
* datapoints - array of points where point is [value, timestamp]. In almost all cases (if other wasn't
|
||||
* explicitly said) we assume datapoints are sorted by timestamp. Timestamp is the number of milliseconds
|
||||
* since 1 January 1970 00:00:00 UTC.
|
||||
*
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import * as utils from './utils';
|
||||
import * as c from './constants';
|
||||
import { TimeSeriesPoints, TimeSeriesValue } from '@grafana/data';
|
||||
|
||||
// Index of the value / timestamp inside a [value, timestamp] datapoint.
const POINT_VALUE = 0;
const POINT_TIMESTAMP = 1;

// One hour in milliseconds — the fixed interval of Zabbix trend data.
const HOUR_MS = 3600 * 1000;
|
||||
|
||||
/**
 * Downsample time series by using given function (avg, min, max).
 *
 * Walks the series from newest to oldest, collecting points into windows of
 * `ms_interval` milliseconds ending at `time_to`, and collapses each window
 * into one point stamped with the window's end time.
 *
 * NOTE(review): points accumulated in the last (oldest) window when the loop
 * finishes are never flushed, so they are silently dropped — confirm this is
 * acceptable for callers.
 *
 * @param datapoints  [value, timestamp(ms)] pairs, sorted by timestamp
 * @param time_to     end of the range, in seconds (Unix time)
 * @param ms_interval window size, milliseconds
 * @param func        'max' | 'min' | anything else means average
 */
function downsample(datapoints, time_to, ms_interval, func) {
  const downsampledSeries = [];
  const timeWindow = {
    from: time_to * 1000 - ms_interval,
    to: time_to * 1000,
  };

  let points_sum = 0;
  let points_num = 0;
  let value_avg = 0;
  let frame = [];

  // Iterate backwards (newest point first).
  for (let i = datapoints.length - 1; i >= 0; i -= 1) {
    if (timeWindow.from < datapoints[i][1] && datapoints[i][1] <= timeWindow.to) {
      // Point belongs to the current window: accumulate.
      points_sum += datapoints[i][0];
      points_num++;
      frame.push(datapoints[i][0]);
    } else {
      // Point fell outside the window: emit the aggregate for this window.
      value_avg = points_num ? points_sum / points_num : 0;

      if (func === 'max') {
        downsampledSeries.push([_.max(frame), timeWindow.to]);
      } else if (func === 'min') {
        downsampledSeries.push([_.min(frame), timeWindow.to]);
      } else {
        downsampledSeries.push([value_avg, timeWindow.to]);
      }

      // Shift time window
      timeWindow.to = timeWindow.from;
      timeWindow.from -= ms_interval;

      points_sum = 0;
      points_num = 0;
      frame = [];

      // Process point again
      i++;
    }
  }
  // Results were collected newest-first; restore chronological order.
  return downsampledSeries.reverse();
}
|
||||
|
||||
/**
 * Detects interval between data points and aligns time series. If there's no value in the interval, puts null as a value.
 *
 * Every point is snapped onto a regular grid of `interval` ms, and [null, ts]
 * points are inserted for grid frames that contain no data, so renderers can
 * show gaps. When `interval` is omitted it is estimated from the data via
 * detectSeriesInterval(); series with an undetectable interval or fewer than
 * two points are returned unchanged.
 */
export function align(datapoints: TimeSeriesPoints, interval?: number): TimeSeriesPoints {
  if (!interval) {
    interval = detectSeriesInterval(datapoints);
  }

  // Nothing to align: unknown interval or a trivial series.
  if (interval <= 0 || datapoints.length <= 1) {
    return datapoints;
  }

  const aligned_ts: TimeSeriesPoints = [];
  let frame_ts = getPointTimeFrame(datapoints[0][POINT_TIMESTAMP], interval);
  let point_frame_ts = frame_ts;
  let point: TimeSeriesValue[];
  for (let i = 0; i < datapoints.length; i++) {
    point = datapoints[i];
    point_frame_ts = getPointTimeFrame(point[POINT_TIMESTAMP], interval);

    if (point_frame_ts > frame_ts) {
      // Move frame window to next non-empty interval and fill empty by null
      while (frame_ts < point_frame_ts) {
        aligned_ts.push([null, frame_ts]);
        frame_ts += interval;
      }
    }

    // Emit the point with its timestamp snapped to the frame start.
    aligned_ts.push([point[POINT_VALUE], point_frame_ts]);
    frame_ts += interval;
  }
  return aligned_ts;
}
|
||||
|
||||
/**
|
||||
* Detects interval between data points in milliseconds.
|
||||
*/
|
||||
function detectSeriesInterval(datapoints: TimeSeriesPoints): number {
|
||||
if (datapoints.length < 2) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
let deltas = [];
|
||||
for (let i = 1; i < datapoints.length; i++) {
|
||||
// Get deltas (in seconds)
|
||||
const d = (datapoints[i][POINT_TIMESTAMP] - datapoints[i - 1][POINT_TIMESTAMP]) / 1000;
|
||||
deltas.push(Math.round(d));
|
||||
}
|
||||
|
||||
// Use 50th percentile (median) as an interval
|
||||
deltas = _.sortBy(deltas);
|
||||
const intervalSec = deltas[Math.floor(deltas.length * 0.5)];
|
||||
return intervalSec * 1000;
|
||||
}
|
||||
|
||||
export function fillTrendsWithNulls(datapoints: TimeSeriesPoints): TimeSeriesPoints {
|
||||
if (datapoints.length <= 1) {
|
||||
return datapoints;
|
||||
}
|
||||
|
||||
const interval = HOUR_MS;
|
||||
const filled_ts: TimeSeriesPoints = [];
|
||||
let frame_ts = datapoints[0][POINT_TIMESTAMP];
|
||||
let point_frame_ts = frame_ts;
|
||||
let point: TimeSeriesValue[];
|
||||
for (let i = 0; i < datapoints.length; i++) {
|
||||
point = datapoints[i];
|
||||
point_frame_ts = point[POINT_TIMESTAMP];
|
||||
|
||||
if (point_frame_ts > frame_ts) {
|
||||
// Move frame window to next non-empty interval and fill empty by null
|
||||
while (frame_ts < point_frame_ts) {
|
||||
filled_ts.push([null, frame_ts]);
|
||||
frame_ts += interval;
|
||||
}
|
||||
}
|
||||
|
||||
filled_ts.push(point);
|
||||
frame_ts += interval;
|
||||
}
|
||||
return filled_ts;
|
||||
}
|
||||
|
||||
/**
 * Group points by given time interval
 * datapoints: [[<value>, <unixtime>], ...]
 *
 * lodash-based implementation: buckets points into frames of `interval`
 * (an interval string, e.g. '5m'), applies `groupByCallback` to each frame's
 * values and returns one [value, frame_ts] point per frame, sorted by time.
 * See groupBy_perf for the faster single-pass variant.
 */
function groupBy(datapoints, interval, groupByCallback) {
  const ms_interval = utils.parseInterval(interval);

  // Calculate frame timestamps
  const frames = _.groupBy(datapoints, (point) => {
    // Calculate time for group of points
    return Math.floor(point[1] / ms_interval) * ms_interval;
  });

  // frame: { '<unixtime>': [[<value>, <unixtime>], ...] }
  // return [{ '<unixtime>': <value> }, { '<unixtime>': <value> }, ...]
  const grouped = _.mapValues(frames, (frame) => {
    const points = _.map(frame, (point) => {
      return point[0];
    });
    return groupByCallback(points);
  });

  // Convert points to Grafana format
  return sortByTime(
    _.map(grouped, (value, timestamp) => {
      // Object keys are strings; convert both back to numbers.
      return [Number(value), Number(timestamp)];
    })
  );
}
|
||||
|
||||
/**
 * Single-pass (performance-oriented) variant of groupBy().
 *
 * Assumes `datapoints` are sorted by timestamp. Buckets points into frames of
 * `interval`, applies `groupByCallback` to each frame's values, and emits
 * [null, frame_ts] for empty frames in between. When `interval` is the
 * special $__range value the whole series collapses to one value via
 * groupByRange().
 */
export function groupBy_perf(datapoints, interval, groupByCallback) {
  if (datapoints.length === 0) {
    return [];
  }

  // Special case: aggregate over the whole selected time range at once.
  if (interval === c.RANGE_VARIABLE_VALUE) {
    return groupByRange(datapoints, groupByCallback);
  }

  const ms_interval = utils.parseInterval(interval);
  const grouped_series = [];
  let frame_values = [];
  let frame_value;
  let frame_ts = datapoints.length ? getPointTimeFrame(datapoints[0][POINT_TIMESTAMP], ms_interval) : 0;
  let point_frame_ts = frame_ts;
  let point;

  for (let i = 0; i < datapoints.length; i++) {
    point = datapoints[i];
    point_frame_ts = getPointTimeFrame(point[POINT_TIMESTAMP], ms_interval);
    if (point_frame_ts === frame_ts) {
      // Still inside the current frame: accumulate.
      frame_values.push(point[POINT_VALUE]);
    } else if (point_frame_ts > frame_ts) {
      // Frame boundary crossed: emit the finished frame.
      frame_value = groupByCallback(frame_values);
      grouped_series.push([frame_value, frame_ts]);

      // Move frame window to next non-empty interval and fill empty by null
      frame_ts += ms_interval;
      while (frame_ts < point_frame_ts) {
        grouped_series.push([null, frame_ts]);
        frame_ts += ms_interval;
      }
      frame_values = [point[POINT_VALUE]];
    }
  }

  // Flush the last (still open) frame.
  frame_value = groupByCallback(frame_values);
  grouped_series.push([frame_value, frame_ts]);

  return grouped_series;
}
|
||||
|
||||
export function groupByRange(datapoints, groupByCallback) {
|
||||
const frame_values = [];
|
||||
const frame_start = datapoints[0][POINT_TIMESTAMP];
|
||||
const frame_end = datapoints[datapoints.length - 1][POINT_TIMESTAMP];
|
||||
let point;
|
||||
for (let i = 0; i < datapoints.length; i++) {
|
||||
point = datapoints[i];
|
||||
frame_values.push(point[POINT_VALUE]);
|
||||
}
|
||||
const frame_value = groupByCallback(frame_values);
|
||||
return [
|
||||
[frame_value, frame_start],
|
||||
[frame_value, frame_end],
|
||||
];
|
||||
}
|
||||
|
||||
/**
 * Summarize set of time series into one.
 * @param {datapoints[]} timeseries array of time series
 *
 * All series are first re-sampled onto the union of all timestamps:
 * missing leading/trailing samples become zeroes (fillZeroes) and interior
 * gaps are filled by linear interpolation (interpolateSeries). The aligned
 * series are then summed point-wise.
 */
function sumSeries(timeseries) {
  // Calculate new points for interpolation
  let new_timestamps = _.uniq(
    _.map(_.flatten(timeseries), (point) => {
      return point[1];
    })
  );
  new_timestamps = _.sortBy(new_timestamps);

  const interpolated_timeseries = _.map(timeseries, (series) => {
    // Zero-pad outside the series' own span, then add null placeholders for
    // every union timestamp this series is missing.
    series = fillZeroes(series, new_timestamps);
    const timestamps = _.map(series, (point) => {
      return point[1];
    });
    const new_points = _.map(_.difference(new_timestamps, timestamps), (timestamp) => {
      return [null, timestamp];
    });
    const new_series = series.concat(new_points);
    return sortByTime(new_series);
  });

  // Replace the null placeholders with interpolated values (in place).
  _.each(interpolated_timeseries, interpolateSeries);

  // Point-wise sum: every series now has one point per union timestamp.
  const new_timeseries = [];
  let sum;
  for (let i = new_timestamps.length - 1; i >= 0; i--) {
    sum = 0;
    for (let j = interpolated_timeseries.length - 1; j >= 0; j--) {
      sum += interpolated_timeseries[j][i][0];
    }
    new_timeseries.push([sum, new_timestamps[i]]);
  }

  return sortByTime(new_timeseries);
}
|
||||
|
||||
function scale(datapoints, factor) {
|
||||
return _.map(datapoints, (point) => {
|
||||
return [point[0] * factor, point[1]];
|
||||
});
|
||||
}
|
||||
|
||||
function scale_perf(datapoints, factor) {
|
||||
for (let i = 0; i < datapoints.length; i++) {
|
||||
datapoints[i] = [datapoints[i][POINT_VALUE] * factor, datapoints[i][POINT_TIMESTAMP]];
|
||||
}
|
||||
|
||||
return datapoints;
|
||||
}
|
||||
|
||||
function offset(datapoints, delta) {
|
||||
for (let i = 0; i < datapoints.length; i++) {
|
||||
datapoints[i] = [datapoints[i][POINT_VALUE] + delta, datapoints[i][POINT_TIMESTAMP]];
|
||||
}
|
||||
|
||||
return datapoints;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple delta. Calculate value delta between points.
|
||||
* @param {*} datapoints
|
||||
*/
|
||||
function delta(datapoints) {
|
||||
const newSeries = [];
|
||||
let deltaValue;
|
||||
for (let i = 1; i < datapoints.length; i++) {
|
||||
deltaValue = datapoints[i][0] - datapoints[i - 1][0];
|
||||
newSeries.push([deltaValue, datapoints[i][1]]);
|
||||
}
|
||||
return newSeries;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates rate per second. Resistant to counter reset.
|
||||
* @param {*} datapoints
|
||||
*/
|
||||
function rate(datapoints) {
|
||||
const newSeries = [];
|
||||
let point, point_prev;
|
||||
let valueDelta = 0;
|
||||
let timeDelta = 0;
|
||||
for (let i = 1; i < datapoints.length; i++) {
|
||||
point = datapoints[i];
|
||||
point_prev = datapoints[i - 1];
|
||||
|
||||
// Convert ms to seconds
|
||||
timeDelta = (point[POINT_TIMESTAMP] - point_prev[POINT_TIMESTAMP]) / 1000;
|
||||
|
||||
// Handle counter reset - use previous value
|
||||
if (point[POINT_VALUE] >= point_prev[POINT_VALUE]) {
|
||||
valueDelta = (point[POINT_VALUE] - point_prev[POINT_VALUE]) / timeDelta;
|
||||
}
|
||||
|
||||
newSeries.push([valueDelta, point[POINT_TIMESTAMP]]);
|
||||
}
|
||||
return newSeries;
|
||||
}
|
||||
|
||||
/**
 * Simple moving average over a sliding window of `n` points.
 *
 * The average is maintained incrementally (add the incoming point, drop the
 * outgoing one). Nulls are excluded from the window; a window containing
 * only nulls yields null. The first output point carries the timestamp of
 * the n-th input point.
 */
function simpleMovingAverage(datapoints: TimeSeriesPoints, n: number): TimeSeriesPoints {
  // It's not possible to calculate MA if n greater than number of points
  n = Math.min(n, datapoints.length);

  const sma = [];
  let w_sum;
  let w_avg = null;
  let w_count = 0;

  // Initial window
  for (let j = n; j > 0; j--) {
    if (datapoints[n - j][POINT_VALUE] !== null) {
      // null + number works here: the first addition seeds the sum.
      w_avg += datapoints[n - j][POINT_VALUE];
      w_count++;
    }
  }
  if (w_count > 0) {
    w_avg = w_avg / w_count;
  } else {
    w_avg = null;
  }
  sma.push([w_avg, datapoints[n - 1][POINT_TIMESTAMP]]);

  // Slide the window across the rest of the series.
  for (let i = n; i < datapoints.length; i++) {
    // Insert next value
    if (datapoints[i][POINT_VALUE] !== null) {
      w_sum = w_avg * w_count;
      w_avg = (w_sum + datapoints[i][POINT_VALUE]) / (w_count + 1);
      w_count++;
    }
    // Remove left side point
    if (datapoints[i - n][POINT_VALUE] !== null) {
      w_sum = w_avg * w_count;
      if (w_count > 1) {
        w_avg = (w_sum - datapoints[i - n][POINT_VALUE]) / (w_count - 1);
        w_count--;
      } else {
        // Window became empty of non-null values.
        w_avg = null;
        w_count = 0;
      }
    }
    sma.push([w_avg, datapoints[i][POINT_TIMESTAMP]]);
  }
  return sma;
}
|
||||
|
||||
/**
 * Exponential moving average.
 *
 * For n > 1 the smoothing factor is a = 2 / (n + 1) and the EMA is seeded
 * with the simple average of the first n non-null points. For n <= 1, n
 * itself is used directly as the smoothing factor and the series starts from
 * its first point. Null values are passed through unchanged.
 */
function expMovingAverage(datapoints: TimeSeriesPoints, n: number): TimeSeriesPoints {
  // It's not possible to calculate MA if n greater than number of points
  n = Math.min(n, datapoints.length);

  let ema = [datapoints[0]];
  let ema_prev = datapoints[0][POINT_VALUE];
  let ema_cur;
  let a;

  if (n > 1) {
    // Calculate a from window size
    a = 2 / (n + 1);

    // Initial window, use simple moving average
    let w_avg = null;
    let w_count = 0;
    for (let j = n; j > 0; j--) {
      if (datapoints[n - j][POINT_VALUE] !== null) {
        w_avg += datapoints[n - j][POINT_VALUE];
        w_count++;
      }
    }
    if (w_count > 0) {
      w_avg = w_avg / w_count;
      // Actually, we should set timestamp from datapoints[n-1] and start calculation of EMA from n.
      // But in order to start EMA from first point (not from Nth) we should expand time range and request N additional
      // points outside left side of range. We can't do that, so this trick is used for pretty view of first N points.
      // We calculate AVG for first N points, but then start from 2nd point, not from Nth. In general, it means we
      // assume that previous N values (0-N, 0-(N-1), ..., 0-1) have the same average value as a first N values.
      ema = [[w_avg, datapoints[0][POINT_TIMESTAMP]]];
      ema_prev = w_avg;
      n = 1;
    }
  } else {
    // Use predefined a and start from 1st point (use it as initial EMA value)
    a = n;
    n = 1;
  }

  for (let i = n; i < datapoints.length; i++) {
    if (datapoints[i][POINT_VALUE] !== null) {
      // Standard EMA recurrence: blend new value with previous EMA.
      ema_cur = a * datapoints[i][POINT_VALUE] + (1 - a) * ema_prev;
      ema_prev = ema_cur;
      ema.push([ema_cur, datapoints[i][POINT_TIMESTAMP]]);
    } else {
      ema.push([null, datapoints[i][POINT_TIMESTAMP]]);
    }
  }
  return ema;
}
|
||||
|
||||
// n-th percentile (0-100) of `values` using the nearest-rank method on a
// sorted copy of the input.
function PERCENTILE(n, values) {
  const sorted = _.sortBy(values);
  return sorted[Math.floor((sorted.length * n) / 100)];
}
|
||||
|
||||
// Number of points in the frame (nulls included).
function COUNT(values) {
  return values.length;
}
|
||||
|
||||
function SUM(values) {
|
||||
let sum = null;
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
if (values[i] !== null) {
|
||||
sum += values[i];
|
||||
}
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
|
||||
function AVERAGE(values) {
|
||||
const values_non_null = getNonNullValues(values);
|
||||
if (values_non_null.length === 0) {
|
||||
return null;
|
||||
}
|
||||
return SUM(values_non_null) / values_non_null.length;
|
||||
}
|
||||
|
||||
function getNonNullValues(values) {
|
||||
const values_non_null = [];
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
if (values[i] !== null) {
|
||||
values_non_null.push(values[i]);
|
||||
}
|
||||
}
|
||||
return values_non_null;
|
||||
}
|
||||
|
||||
// Minimum of the frame; delegates to lodash (undefined for an empty frame).
function MIN(values) {
  return _.min(values);
}
|
||||
|
||||
// Maximum of the frame; delegates to lodash (undefined for an empty frame).
function MAX(values) {
  return _.max(values);
}
|
||||
|
||||
// Median of the frame: middle element (upper of the two for even-sized
// frames) of a sorted copy.
function MEDIAN(values) {
  const sorted = _.sortBy(values);
  return sorted[Math.floor(sorted.length / 2)];
}
|
||||
|
||||
///////////////////////
|
||||
// Utility functions //
|
||||
///////////////////////
|
||||
|
||||
/**
 * For given point calculate corresponding time frame.
 *
 * |__*_|_*__|___*| -> |*___|*___|*___|
 *
 * Floors `timestamp` down to the start of its `ms_interval`-wide frame.
 * @param {*} timestamp   point timestamp, milliseconds
 * @param {*} ms_interval frame width, milliseconds
 */
function getPointTimeFrame(timestamp, ms_interval) {
  return Math.floor(timestamp / ms_interval) * ms_interval;
}
|
||||
|
||||
function sortByTime(series) {
|
||||
return _.sortBy(series, (point) => {
|
||||
return point[1];
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Fill empty front and end of series by zeroes.
|
||||
*
|
||||
* | *** | | *** |
|
||||
* |___ ___| -> |*** ***|
|
||||
* @param {*} series
|
||||
* @param {*} timestamps
|
||||
*/
|
||||
function fillZeroes(series, timestamps) {
|
||||
const prepend = [];
|
||||
const append = [];
|
||||
let new_point;
|
||||
for (let i = 0; i < timestamps.length; i++) {
|
||||
if (timestamps[i] < series[0][POINT_TIMESTAMP]) {
|
||||
new_point = [0, timestamps[i]];
|
||||
prepend.push(new_point);
|
||||
} else if (timestamps[i] > series[series.length - 1][POINT_TIMESTAMP]) {
|
||||
new_point = [0, timestamps[i]];
|
||||
append.push(new_point);
|
||||
}
|
||||
}
|
||||
return _.concat(_.concat(prepend, series), append);
|
||||
}
|
||||
|
||||
/**
 * Interpolate series with gaps
 *
 * Replaces each missing value with a linear interpolation between the
 * nearest non-null neighbors; when only one side has data, that side's value
 * is reused (flat extrapolation). Mutates `series` in place and returns it.
 *
 * NOTE(review): the gap test is `!series[i][0]`, which treats 0 (and NaN)
 * the same as null/missing — confirm zero values should be interpolated.
 */
function interpolateSeries(series) {
  let left, right;

  // Interpolate series
  for (let i = series.length - 1; i >= 0; i--) {
    if (!series[i][0]) {
      left = findNearestLeft(series, i);
      right = findNearestRight(series, i);
      // One-sided gap: clone the available neighbor to both sides.
      if (!left) {
        left = right;
      }
      if (!right) {
        right = left;
      }
      series[i][0] = linearInterpolation(series[i][1], left, right);
    }
  }
  return series;
}
|
||||
|
||||
function linearInterpolation(timestamp, left, right) {
|
||||
if (left[1] === right[1]) {
|
||||
return (left[0] + right[0]) / 2;
|
||||
} else {
|
||||
return left[0] + ((right[0] - left[0]) / (right[1] - left[1])) * (timestamp - left[1]);
|
||||
}
|
||||
}
|
||||
|
||||
function findNearestRight(series, pointIndex) {
|
||||
for (let i = pointIndex; i < series.length; i++) {
|
||||
if (series[i][0] !== null) {
|
||||
return series[i];
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function findNearestLeft(series, pointIndex) {
|
||||
for (let i = pointIndex; i > 0; i--) {
|
||||
if (series[i][0] !== null) {
|
||||
return series[i];
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function flattenDatapoints(datapoints) {
|
||||
const depth = utils.getArrayDepth(datapoints);
|
||||
if (depth <= 2) {
|
||||
// Don't process if datapoints already flattened
|
||||
return datapoints;
|
||||
}
|
||||
return _.flatten(datapoints);
|
||||
}
|
||||
|
||||
////////////
|
||||
// Export //
|
||||
////////////
|
||||
|
||||
// Public surface of the time series processing module.
const exportedFunctions = {
  // Series transformations
  downsample,
  groupBy,
  groupBy_perf,
  groupByRange,
  sumSeries,
  scale,
  offset,
  scale_perf,
  delta,
  rate,
  simpleMovingAverage,
  expMovingAverage,
  // Aggregation functions
  SUM,
  COUNT,
  AVERAGE,
  MIN,
  MAX,
  MEDIAN,
  PERCENTILE,
  // Helpers
  sortByTime,
  flattenDatapoints,
  align,
};

export default exportedFunctions;
|
||||
391
src/datasource/types.ts
Normal file
391
src/datasource/types.ts
Normal file
@@ -0,0 +1,391 @@
|
||||
import { BusEventWithPayload, DataQuery, DataSourceJsonData, DataSourceRef, SelectableValue } from '@grafana/data';
|
||||
|
||||
/** Data source settings stored in Grafana's jsonData (non-secret part). */
export interface ZabbixDSOptions extends DataSourceJsonData {
  username: string;
  // NOTE(review): a secret counterpart exists in ZabbixSecureJSONData —
  // presumably this plain field is legacy; confirm before relying on it.
  password?: string;
  trends: boolean;
  trendsFrom: string;
  trendsRange: string;
  cacheTTL: string;
  timeout?: number;
  // Direct DB connection settings (optional history backend).
  dbConnectionEnable: boolean;
  dbConnectionDatasourceId?: number;
  dbConnectionDatasourceName?: string;
  dbConnectionRetentionPolicy?: string;
  disableReadOnlyUsersAck: boolean;
  disableDataAlignment: boolean;
}

/** Secret part of the data source settings (stored encrypted by Grafana). */
export interface ZabbixSecureJSONData {
  password?: string;
}

/** Result of a connection test: Zabbix version plus DB connector status. */
export interface ZabbixConnectionInfo {
  zabbixVersion: string;
  dbConnectorStatus: {
    dsType: string;
    dsName: string;
  };
}

/** Query issued by the config page to test the connection. */
export interface ZabbixConnectionTestQuery {
  datasourceId: number;
  queryType: string;
}
|
||||
|
||||
/**
 * Main metrics query model. Filter fields hold the raw filter text plus an
 * optional resolved display name.
 */
export interface ZabbixMetricsQuery extends DataQuery {
  queryType: string;
  datasourceId?: number;
  group?: { filter: string; name?: string };
  host?: { filter: string; name?: string };
  application?: { filter: string; name?: string };
  itemTag?: { filter: string; name?: string };
  item?: { filter: string; name?: string };
  textFilter?: string;
  mode?: number;
  itemids?: string;
  useCaptureGroups?: boolean;
  proxy?: { filter: string };
  trigger?: { filter: string };
  itServiceFilter?: string;
  slaProperty?: any;
  slaInterval?: string;
  tags?: { filter: string };
  triggers?: { minSeverity: number; acknowledged: number; count: boolean };
  functions?: MetricFunc[];
  options?: ZabbixQueryOptions;
  // Problems
  showProblems?: ShowProblemTypes;
  // Deprecated — superseded by the structured host/item filter objects above.
  hostFilter?: string;
  itemFilter?: string;
}

/** Per-query options applied on top of the base query. */
export interface ZabbixQueryOptions {
  showDisabledItems?: boolean;
  skipEmptyValues?: boolean;
  disableDataAlignment?: boolean;
  useZabbixValueMapping?: boolean;
  // Problems options
  minSeverity?: number;
  sortProblems?: string;
  acknowledged?: number;
  hostsInMaintenance?: boolean;
  hostProxy?: boolean;
  limit?: number;
  useTimeRange?: boolean;
  severities?: number[];

  // Annotations
  showOkEvents?: boolean;
  hideAcknowledged?: boolean;
  showHostname?: boolean;
}
|
||||
|
||||
/** A processing function instance applied to query results. */
export interface MetricFunc {
  text: string;
  params: Array<string | number>;
  def: FuncDef;
  added?: boolean;
}

/** Definition (signature and metadata) of an available processing function. */
export interface FuncDef {
  name: string;
  params: ParamDef[];
  defaultParams: Array<string | number>;
  category?: string;
  shortName?: any;
  fake?: boolean;
  version?: string;
  description?: string;
  /**
   * True if the function was not found on the list of available function descriptions.
   */
  unknown?: boolean;
}

/** Definition of a single function parameter. */
export type ParamDef = {
  name: string;
  type: string;
  options?: Array<string | number>;
  multiple?: boolean;
  optional?: boolean;
  version?: string;
};
|
||||
|
||||
// The paths of these files have moved around in Grafana and they don't resolve properly
|
||||
// either. Safer not to bother trying to import them just for type hinting.
|
||||
|
||||
export interface TemplateSrv {
  // NOTE(review): declared as a single object, but the name suggests a
  // collection of variables — confirm against actual usage.
  variables: {
    name: string;
  };

  highlightVariablesAsHtml(str: any): any;

  replace(target: any, scopedVars?: any, format?: any): any;
}

export interface DashboardSrv {
  dash: any;
}

// Grafana types from backend code

type RowValues = object[];
type TimePoint = [number?, number?];
type TimeSeriesPoints = TimePoint[];
type TimeSeriesSlice = TimeSeries[];

/** Time series shape returned by the backend plugin. */
interface TimeSeries {
  name: string;
  points: TimeSeriesPoints;
  tags: { [key: string]: string };
}

interface TableColumn {
  text: string;
}

interface Table {
  columns: TableColumn[];
  rows: RowValues[];
}

/** One per-refId entry of a backend TSDB response. */
interface QueryResult {
  error: string;
  refId: string;
  meta: any;
  series: TimeSeriesSlice[];
  tables: Table[];
}

/** Backend datasource response envelope, keyed by refId. */
export interface TSDBResponse {
  results: { [key: string]: QueryResult };
  message: string;
}
|
||||
|
||||
/** Props of the template variable query editor component. */
export interface VariableQueryProps {
  query: LegacyVariableQuery;
  onChange: (query: VariableQuery, definition: string) => void;
  datasource: any;
  templateSrv: any;
}

/** Editor state: selected query type plus the raw legacy query string. */
export interface VariableQueryData extends VariableQuery {
  selectedQueryType: SelectableValue<VariableQueryTypes>;
  legacyQuery?: string;
}

/** Structured template variable query (group/host/application/item filters). */
export interface VariableQuery {
  queryType: VariableQueryTypes;
  group?: string;
  host?: string;
  application?: string;
  itemTag?: string;
  item?: string;
}

// Old-style variable queries were plain strings ("group.host.app.item").
export type LegacyVariableQuery = VariableQuery | string;

export enum VariableQueryTypes {
  Group = 'group',
  Host = 'host',
  Application = 'application',
  ItemTag = 'itemTag',
  Item = 'item',
  ItemValues = 'itemValues',
}

export enum ShowProblemTypes {
  Problems = 'problems',
  Recent = 'recent',
  History = 'history',
}
|
||||
|
||||
/** Unified problem model consumed by the problems UI (event + trigger data). */
export interface ProblemDTO {
  triggerid?: string;
  eventid?: string;
  timestamp: number;
  lastchange?: string;
  lastchangeUnix?: number;

  /** Name of the trigger. */
  name?: string;

  /** Same as a name. */
  description?: string;

  /** Whether the trigger is in OK or problem state. */
  value?: string;

  datasource?: DataSourceRef | string;
  comments?: string;
  host?: string;
  hostTechName?: string;
  proxy?: string;
  severity?: string;
  priority?: string;

  acknowledged?: '1' | '0';
  acknowledges?: ZBXAcknowledge[];

  // Related Zabbix entities
  groups?: ZBXGroup[];
  hosts?: ZBXHost[];
  items?: ZBXItem[];
  alerts?: ZBXAlert[];
  tags?: ZBXTag[];
  url?: string;

  expression?: string;
  correlation_mode?: string;
  correlation_tag?: string;
  suppressed?: string;
  suppression_data?: any[];
  state?: string;
  maintenance?: boolean;
  manual_close?: string;
  error?: string;

  // UI-only flags
  showAckButton?: boolean;
  type?: string;
}

/** Raw problem object; field names follow Zabbix API naming. */
export interface ZBXProblem {
  acknowledged?: '1' | '0';
  acknowledges?: ZBXAcknowledge[];
  clock: string;
  ns: string;
  correlationid?: string;
  datasource?: string;
  name?: string;
  eventid?: string;
  maintenance?: boolean;
  object?: string;
  objectid?: string;
  opdata?: any;
  r_eventid?: string;
  r_clock?: string;
  r_ns?: string;
  severity?: string;
  showAckButton?: boolean;
  source?: string;
  suppressed?: string;
  suppression_data?: any[];
  tags?: ZBXTag[];
  userid?: string;
}

/** Raw trigger object; field names follow Zabbix API naming. */
export interface ZBXTrigger {
  acknowledges?: ZBXAcknowledge[];
  showAckButton?: boolean;
  alerts?: ZBXAlert[];
  age?: string;
  color?: string;
  comments?: string;
  correlation_mode?: string;
  correlation_tag?: string;
  datasource?: string;
  description?: string;
  error?: string;
  expression?: string;
  flags?: string;
  groups?: ZBXGroup[];
  host?: string;
  hostTechName?: string;
  hosts?: ZBXHost[];
  items?: ZBXItem[];
  lastEvent?: ZBXEvent;
  lastchange?: string;
  lastchangeUnix?: number;
  maintenance?: boolean;
  manual_close?: string;
  priority?: string;
  proxy?: string;
  recovery_expression?: string;
  recovery_mode?: string;
  severity?: string;
  state?: string;
  status?: string;
  tags?: ZBXTag[];
  templateid?: string;
  triggerid?: string;
  /** Whether the trigger can generate multiple problem events. */
  type?: string;
  url?: string;
  value?: string;
}
|
||||
|
||||
/** Zabbix host group. */
export interface ZBXGroup {
  groupid: string;
  name: string;
}

/** Zabbix host. */
export interface ZBXHost {
  hostid: string;
  name: string;
  host: string;
  maintenance_status?: string;
  proxy_hostid?: string;
  proxy?: any;
}

/** Zabbix item. */
export interface ZBXItem {
  itemid: string;
  name: string;
  key_: string;
  lastvalue?: string;
  tags?: ZBXItemTag[];
}

/** Item tag: name plus optional value. */
export interface ZBXItemTag {
  tag: string;
  value?: string;
}

/** Zabbix event. */
export interface ZBXEvent {
  eventid: string;
  clock: string;
  ns?: string;
  value?: string;
  name?: string;
  source?: string;
  object?: string;
  objectid?: string;
  severity?: string;
  hosts?: ZBXHost[];
  acknowledged?: '1' | '0';
  acknowledges?: ZBXAcknowledge[];
  tags?: ZBXTag[];
  suppressed?: string;
}

/** Event/problem tag. */
export interface ZBXTag {
  tag: string;
  value?: string;
}

/** Acknowledge entry attached to an event, including author details. */
export interface ZBXAcknowledge {
  acknowledgeid: string;
  eventid: string;
  userid: string;
  action: string;
  clock: string;
  time: string;
  message?: string;
  user: string;
  alias: string;
  name: string;
  surname: string;
}

/** Alert generated for an event. */
export interface ZBXAlert {
  eventid: string;
  clock: string;
  message: string;
  error: string;
}

/** Event published on the Grafana event bus when a query is updated. */
export class ZBXQueryUpdatedEvent extends BusEventWithPayload<any> {
  static type = 'zbx-query-updated';
}
|
||||
527
src/datasource/utils.ts
Normal file
527
src/datasource/utils.ts
Normal file
@@ -0,0 +1,527 @@
|
||||
import _ from 'lodash';
|
||||
// eslint-disable-next-line
|
||||
import moment from 'moment';
|
||||
import * as c from './constants';
|
||||
import { VariableQuery, VariableQueryTypes, ZBXItemTag } from './types';
|
||||
import { DataFrame, FieldType, getValueFormats, MappingType, rangeUtil, ValueMapping } from '@grafana/data';
|
||||
|
||||
/*
 * This regex matches 3 types of variable reference with an optional format specifier
 * \$(\w+)                          $var1
 * \[\[([\s\S]+?)(?::(\w+))?\]\]    [[var2]] or [[var2:fmt2]]
 * \${(\w+)(?::(\w+))?}             ${var3} or ${var3:fmt3}
 *
 * NOTE: carries the global flag — reset `lastIndex` before reusing it with
 * `.test()`/`.exec()`, otherwise matching resumes mid-string.
 */
export const variableRegex = /\$(\w+)|\[\[([\s\S]+?)(?::(\w+))?\]\]|\${(\w+)(?:\.([^:^\}]+))?(?::(\w+))?}/g;
|
||||
|
||||
/**
|
||||
* Expand Zabbix item name
|
||||
*
|
||||
* @param {string} name item name, ie "CPU $2 time"
|
||||
* @param {string} key item key, ie system.cpu.util[,system,avg1]
|
||||
* @return {string} expanded name, ie "CPU system time"
|
||||
*/
|
||||
export function expandItemName(name: string, key: string): string {
|
||||
// extract params from key:
|
||||
// "system.cpu.util[,system,avg1]" --> ["", "system", "avg1"]
|
||||
const key_params_str = key.substring(key.indexOf('[') + 1, key.lastIndexOf(']'));
|
||||
const key_params = splitKeyParams(key_params_str);
|
||||
|
||||
// replace item parameters
|
||||
for (let i = key_params.length; i >= 1; i--) {
|
||||
name = name.replace('$' + i, key_params[i - 1]);
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
export function expandItems(items) {
|
||||
_.forEach(items, (item) => {
|
||||
item.item = item.name;
|
||||
item.name = expandItemName(item.item, item.key_);
|
||||
return item;
|
||||
});
|
||||
return items;
|
||||
}
|
||||
|
||||
function splitKeyParams(paramStr) {
|
||||
const params = [];
|
||||
let quoted = false;
|
||||
let in_array = false;
|
||||
const split_symbol = ',';
|
||||
let param = '';
|
||||
|
||||
_.forEach(paramStr, (symbol) => {
|
||||
if (symbol === '"' && in_array) {
|
||||
param += symbol;
|
||||
} else if (symbol === '"' && quoted) {
|
||||
quoted = false;
|
||||
} else if (symbol === '"' && !quoted) {
|
||||
quoted = true;
|
||||
} else if (symbol === '[' && !quoted) {
|
||||
in_array = true;
|
||||
} else if (symbol === ']' && !quoted) {
|
||||
in_array = false;
|
||||
} else if (symbol === split_symbol && !quoted && !in_array) {
|
||||
params.push(param);
|
||||
param = '';
|
||||
} else {
|
||||
param += symbol;
|
||||
}
|
||||
});
|
||||
|
||||
params.push(param);
|
||||
return params;
|
||||
}
|
||||
|
||||
const MACRO_PATTERN = /{\$[A-Z0-9_\.]+}/g;
|
||||
|
||||
export function containsMacro(itemName) {
|
||||
return MACRO_PATTERN.test(itemName);
|
||||
}
|
||||
|
||||
/**
 * Replace {$MACRO} references in an item name (or trigger URL when
 * isTriggerItem is set) with matching macro values. Host-level macros are
 * matched by host id; macros without a hostid are treated as global.
 * Returns the name with every resolvable macro substituted.
 */
export function replaceMacro(item, macros, isTriggerItem?) {
  let itemName = isTriggerItem ? item.url : item.name;
  const item_macros = itemName.match(MACRO_PATTERN);
  _.forEach(item_macros, (macro) => {
    // Keep only macros applicable to this item's host(s), plus global ones.
    const host_macros = _.filter(macros, (m) => {
      if (m.hostid) {
        if (isTriggerItem) {
          // Trigger item can have multiple hosts
          // Check all trigger host ids against macro host id
          let hostIdFound = false;
          _.forEach(item.hosts, (h) => {
            if (h.hostid === m.hostid) {
              hostIdFound = true;
            }
          });
          return hostIdFound;
        } else {
          // Check app host id against macro host id
          return m.hostid === item.hostid;
        }
      } else {
        // Add global macros
        return true;
      }
    });

    // Substitute the first matching definition (only the "$" needs escaping).
    const macro_def = _.find(host_macros, { macro: macro });
    if (macro_def && macro_def.value) {
      const macro_value = macro_def.value;
      const macro_regex = new RegExp(escapeMacro(macro));
      itemName = itemName.replace(macro_regex, macro_value);
    }
  });

  return itemName;
}
|
||||
|
||||
function escapeMacro(macro) {
|
||||
macro = macro.replace(/\$/, '\\$');
|
||||
return macro;
|
||||
}
|
||||
|
||||
/**
 * Convert a legacy string variable query ("group.host.app.item", optionally
 * with {}-wrapped segments and "*" wildcards) into the structured
 * VariableQuery model. The number of segments selects the query type
 * (1 = group, 2 = host, 3 = application, 4 = item).
 */
export function parseLegacyVariableQuery(query: string): VariableQuery {
  let queryType: VariableQueryTypes;
  const parts = [];

  // Split query. Query structure: group.host.app.item
  _.each(splitTemplateQuery(query), (part) => {
    // Replace wildcard to regex
    if (part === '*') {
      part = '/.*/';
    }
    parts.push(part);
  });
  const template = _.zipObject(['group', 'host', 'app', 'item'], parts);

  if (parts.length === 4 && template.app === '/.*/') {
    // Search for all items, even it's not belong to any application
    template.app = '';
  }

  // NOTE(review): queries with more than 4 segments leave queryType
  // undefined — confirm callers never produce such queries.
  switch (parts.length) {
    case 1:
      queryType = VariableQueryTypes.Group;
      break;
    case 2:
      queryType = VariableQueryTypes.Host;
      break;
    case 3:
      queryType = VariableQueryTypes.Application;
      break;
    case 4:
      queryType = VariableQueryTypes.Item;
      break;
  }

  const variableQuery: VariableQuery = {
    queryType,
    group: template.group || '',
    host: template.host || '',
    application: template.app || '',
    item: template.item || '',
  };

  return variableQuery;
}
|
||||
|
||||
/**
|
||||
* Split template query to parts of zabbix entities
|
||||
* group.host.app.item -> [group, host, app, item]
|
||||
* {group}{host.com} -> [group, host.com]
|
||||
*/
|
||||
export function splitTemplateQuery(query) {
|
||||
const splitPattern = /\{[^\{\}]*\}|\{\/.*\/\}/g;
|
||||
let split;
|
||||
|
||||
if (isContainsBraces(query)) {
|
||||
const result = query.match(splitPattern);
|
||||
split = _.map(result, (part) => {
|
||||
return _.trim(part, '{}');
|
||||
});
|
||||
} else {
|
||||
split = query.split('.');
|
||||
}
|
||||
|
||||
return split;
|
||||
}
|
||||
|
||||
function isContainsBraces(query) {
|
||||
const bracesPattern = /^\{.+\}$/;
|
||||
return bracesPattern.test(query);
|
||||
}
|
||||
|
||||
// Pattern for testing regex
|
||||
export const regexPattern = /^\/(.*)\/([gmi]*)$/m;
|
||||
|
||||
export function isRegex(str) {
|
||||
return regexPattern.test(str);
|
||||
}
|
||||
|
||||
export function isTemplateVariable(str, templateVariables) {
|
||||
const variablePattern = /^\$\w+/;
|
||||
if (variablePattern.test(str)) {
|
||||
const variables = _.map(templateVariables, (variable) => {
|
||||
return '$' + variable.name;
|
||||
});
|
||||
return _.includes(variables, str);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function getRangeScopedVars(range) {
|
||||
const msRange = range.to.diff(range.from);
|
||||
const sRange = Math.round(msRange / 1000);
|
||||
const regularRange = rangeUtil.secondsToHms(msRange / 1000);
|
||||
return {
|
||||
__range_ms: { text: msRange, value: msRange },
|
||||
__range_s: { text: sRange, value: sRange },
|
||||
__range: { text: regularRange, value: regularRange },
|
||||
__range_series: { text: c.RANGE_VARIABLE_VALUE, value: c.RANGE_VARIABLE_VALUE },
|
||||
};
|
||||
}
|
||||
|
||||
export function buildRegex(str) {
|
||||
const matches = str.match(regexPattern);
|
||||
const pattern = matches[1];
|
||||
const flags = matches[2] !== '' ? matches[2] : undefined;
|
||||
return new RegExp(pattern, flags);
|
||||
}
|
||||
|
||||
// Escape regex special characters so a string can be embedded verbatim inside
// a RegExp. Needed for template variables replace.
// From Grafana's templateSrv.js
export function escapeRegex(value) {
  return value.replace(/[\\^$*+?.()|[\]{}\/]/g, '\\$&');
}
|
||||
|
||||
/**
|
||||
* Parses Zabbix item update interval (returns milliseconds). Returns 0 in case of custom intervals.
|
||||
*/
|
||||
export function parseItemInterval(interval: string): number {
|
||||
const normalizedInterval = normalizeZabbixInterval(interval);
|
||||
if (normalizedInterval) {
|
||||
return parseInterval(normalizedInterval);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
export function normalizeZabbixInterval(interval: string): string {
|
||||
const intervalPattern = /(^[\d]+)(y|M|w|d|h|m|s)?/g;
|
||||
const parsedInterval = intervalPattern.exec(interval);
|
||||
if (!parsedInterval || !interval || (parsedInterval.length > 2 && !parsedInterval[2])) {
|
||||
return '';
|
||||
}
|
||||
return parsedInterval[1] + (parsedInterval.length > 2 ? parsedInterval[2] : 's');
|
||||
}
|
||||
|
||||
// Returns interval in milliseconds
|
||||
export function parseInterval(interval: string): number {
|
||||
const intervalPattern = /(^[\d]+)(y|M|w|d|h|m|s)/g;
|
||||
const momentInterval: any[] = intervalPattern.exec(interval);
|
||||
const duration = moment.duration(Number(momentInterval[1]), momentInterval[2]);
|
||||
return duration.valueOf() as number;
|
||||
}
|
||||
|
||||
export function parseTimeShiftInterval(interval) {
|
||||
const intervalPattern = /^([\+\-]*)([\d]+)(y|M|w|d|h|m|s)/g;
|
||||
const momentInterval: any[] = intervalPattern.exec(interval);
|
||||
let duration: any = 0;
|
||||
|
||||
if (momentInterval[1] === '+') {
|
||||
duration = 0 - (moment.duration(Number(momentInterval[2]), momentInterval[3]).valueOf() as any);
|
||||
} else {
|
||||
duration = moment.duration(Number(momentInterval[2]), momentInterval[3]).valueOf();
|
||||
}
|
||||
|
||||
return duration;
|
||||
}
|
||||
|
||||
/**
 * Format acknowledges as an HTML table.
 *
 * @param acknowledges array of Zabbix acknowledge objects
 * @returns HTML-formatted table, or '' when there are no acknowledges
 */
export function formatAcknowledges(acknowledges) {
  if (acknowledges.length) {
    let formatted_acknowledges =
      '<br><br>Acknowledges:<br><table><tr><td><b>Time</b></td>' + '<td><b>User</b></td><td><b>Comments</b></td></tr>';
    // Render each acknowledge as a table row, then append rows in order.
    _.each(
      _.map(acknowledges, (ack) => {
        const timestamp = moment.unix(ack.clock);
        return (
          '<tr><td><i>' +
          timestamp.format('DD MMM YYYY HH:mm:ss') +
          '</i></td><td>' +
          ack.alias +
          ' (' +
          ack.name +
          ' ' +
          ack.surname +
          ')' +
          '</td><td>' +
          ack.message +
          '</td></tr>'
        );
      }),
      (ack) => {
        formatted_acknowledges = formatted_acknowledges.concat(ack);
      }
    );
    formatted_acknowledges = formatted_acknowledges.concat('</table>');
    return formatted_acknowledges;
  } else {
    return '';
  }
}
|
||||
|
||||
export function convertToZabbixAPIUrl(url) {
|
||||
const zabbixAPIUrlPattern = /.*api_jsonrpc.php$/;
|
||||
const trimSlashPattern = /(.*?)[\/]*$/;
|
||||
if (url.match(zabbixAPIUrlPattern)) {
|
||||
return url;
|
||||
} else {
|
||||
return url.replace(trimSlashPattern, '$1');
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Wrap function to prevent multiple calls
 * when waiting for result.
 *
 * While an invocation is in flight, subsequent calls return the same pending
 * promise; once it settles (fulfilled or rejected) the keeper is cleared so
 * the next call invokes `func` again.
 */
export function callOnce(func, promiseKeeper) {
  return function () {
    if (!promiseKeeper) {
      promiseKeeper = Promise.resolve(
        func
          .apply(this, arguments)
          .then((result) => {
            // Clear the keeper so future calls re-invoke func.
            promiseKeeper = null;
            return result;
          })
          .catch((err) => {
            // Also clear on failure, then propagate the error.
            promiseKeeper = null;
            throw err;
          })
      );
    }
    return promiseKeeper;
  };
}
|
||||
|
||||
/**
|
||||
* Apply function one by one: `sequence([a(), b(), c()]) = c(b(a()))`
|
||||
* @param {*} funcsArray functions to apply
|
||||
*/
|
||||
export function sequence(funcsArray) {
|
||||
return function (result) {
|
||||
for (let i = 0; i < funcsArray.length; i++) {
|
||||
result = funcsArray[i].call(this, result);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
}
|
||||
|
||||
const versionPattern = /^(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-([0-9A-Za-z\.]+))?/;
|
||||
|
||||
export function isValidVersion(version) {
|
||||
return versionPattern.exec(version);
|
||||
}
|
||||
|
||||
export function parseVersion(version: string) {
|
||||
const match = versionPattern.exec(version);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
const major = Number(match[1]);
|
||||
const minor = Number(match[2] || 0);
|
||||
const patch = Number(match[3] || 0);
|
||||
const meta = match[4];
|
||||
return { major, minor, patch, meta };
|
||||
}
|
||||
|
||||
/**
|
||||
* Replaces any space-like symbols (tabs, new lines, spaces) by single whitespace.
|
||||
*/
|
||||
export function compactQuery(query) {
|
||||
return query.replace(/\s+/g, ' ').trim();
|
||||
}
|
||||
|
||||
export function getArrayDepth(a, level = 0) {
|
||||
if (a.length === 0) {
|
||||
return 1;
|
||||
}
|
||||
const elem = a[0];
|
||||
if (_.isArray(elem)) {
|
||||
return getArrayDepth(elem, level + 1);
|
||||
}
|
||||
return level + 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether its argument represents a numeric value.
|
||||
*/
|
||||
export function isNumeric(n: any): boolean {
|
||||
return !isNaN(parseFloat(n)) && isFinite(n);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses tags string into array of {tag: value} objects
|
||||
*/
|
||||
export function parseTags(tagStr: string): any[] {
|
||||
if (!tagStr) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let tags: any[] = _.map(tagStr.split(','), (tag) => tag.trim());
|
||||
tags = _.map(tags, (tag) => {
|
||||
const tagParts = tag.split(':');
|
||||
return { tag: tagParts[0]?.trim(), value: tagParts[1]?.trim() };
|
||||
});
|
||||
return tags;
|
||||
}
|
||||
|
||||
// Parses string representation of tag into the object
|
||||
export function parseItemTag(tagStr: string): ZBXItemTag {
|
||||
const itemTag: ZBXItemTag = { tag: '', value: '' };
|
||||
const tagParts = tagStr.split(': ');
|
||||
itemTag.tag = tagParts[0];
|
||||
if (tagParts[1]) {
|
||||
itemTag.value = tagParts[1];
|
||||
}
|
||||
return itemTag;
|
||||
}
|
||||
|
||||
export function itemTagToString(t: ZBXItemTag): string {
|
||||
return t.value ? `${t.tag}: ${t.value}` : t.tag;
|
||||
}
|
||||
|
||||
export function mustArray(result: any): any[] {
|
||||
return result || [];
|
||||
}
|
||||
|
||||
// Mapping from Zabbix unit suffixes to Grafana unit ids.
const getUnitsMap = () => ({
  '%': 'percent',
  b: 'decbits', // bits(SI)
  bps: 'bps', // bits/sec(SI)
  B: 'bytes', // bytes(IEC)
  Bps: 'binBps', // bytes/sec(IEC)
  // 'unixtime': 'dateTimeAsSystem',
  uptime: 'dtdhms',
  qps: 'qps', // requests/sec (rps)
  iops: 'iops', // I/O ops/sec (iops)
  Hz: 'hertz', // Hertz (1/s)
  V: 'volt', // Volt (V)
  C: 'celsius', // Celsius (°C)
  RPM: 'rotrpm', // Revolutions per minute (rpm)
  dBm: 'dBm', // Decibel-milliwatt (dBm)
});
|
||||
|
||||
const getKnownGrafanaUnits = () => {
|
||||
const units = {};
|
||||
const categories = getValueFormats();
|
||||
for (const category of categories) {
|
||||
for (const unitDesc of category.submenu) {
|
||||
const unit = unitDesc.value;
|
||||
units[unit] = unit;
|
||||
}
|
||||
}
|
||||
return units;
|
||||
};
|
||||
|
||||
const unitsMap = getUnitsMap();
|
||||
const knownGrafanaUnits = getKnownGrafanaUnits();
|
||||
|
||||
export function convertZabbixUnit(zabbixUnit: string): string {
|
||||
let unit = unitsMap[zabbixUnit];
|
||||
if (!unit) {
|
||||
unit = knownGrafanaUnits[zabbixUnit];
|
||||
}
|
||||
return unit;
|
||||
}
|
||||
|
||||
export function getValueMapping(item, valueMappings: any[]): ValueMapping[] | null {
|
||||
const { valuemapid } = item;
|
||||
const mapping = valueMappings?.find((m) => m.valuemapid === valuemapid);
|
||||
if (!mapping) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (mapping.mappings as any[]).map((m, i) => {
|
||||
const valueMapping: ValueMapping = {
|
||||
// id: i,
|
||||
type: MappingType.ValueToText,
|
||||
options: {
|
||||
value: m.value,
|
||||
text: m.newvalue,
|
||||
},
|
||||
};
|
||||
return valueMapping;
|
||||
});
|
||||
}
|
||||
|
||||
export function isProblemsDataFrame(data: DataFrame): boolean {
|
||||
return (
|
||||
data.fields.length && data.fields[0].type === FieldType.other && data.fields[0].config.custom['type'] === 'problems'
|
||||
);
|
||||
}
|
||||
|
||||
// Swap n and k elements.
|
||||
export function swap<T>(list: T[], n: number, k: number): T[] {
|
||||
if (list === null || list.length < 2 || k > list.length - 1 || k < 0 || n > list.length - 1 || n < 0) {
|
||||
return list;
|
||||
}
|
||||
|
||||
const newList: T[] = new Array(list.length);
|
||||
for (let i = 0; i < list.length; i++) {
|
||||
if (i === n) {
|
||||
newList[i] = list[k];
|
||||
} else if (i === k) {
|
||||
newList[i] = list[n];
|
||||
} else {
|
||||
newList[i] = list[i];
|
||||
}
|
||||
}
|
||||
return newList;
|
||||
}
|
||||
88
src/datasource/zabbix/connectors/dbConnector.ts
Normal file
88
src/datasource/zabbix/connectors/dbConnector.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import _ from 'lodash';
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
|
||||
// Default row limit applied to direct DB queries.
export const DEFAULT_QUERY_LIMIT = 10000;

// Zabbix history value_type -> history table name.
export const HISTORY_TO_TABLE_MAP = {
  '0': 'history',
  '1': 'history_str',
  '2': 'history_log',
  '3': 'history_uint',
  '4': 'history_text',
};

// Zabbix trend value_type -> trends table name.
export const TREND_TO_TABLE_MAP = {
  '0': 'trends',
  '3': 'trends_uint',
};

// consolidateBy() option -> SQL aggregate function.
export const consolidateByFunc = {
  avg: 'AVG',
  min: 'MIN',
  max: 'MAX',
  sum: 'SUM',
  count: 'COUNT',
};

// consolidateBy() option -> column of the trends tables.
export const consolidateByTrendColumns = {
  avg: 'value_avg',
  min: 'value_min',
  max: 'value_max',
  sum: 'num*value_avg', // sum of sums inside the one-hour trend period
};
|
||||
|
||||
/**
 * Base class for external history database connectors. Subclasses should implement `getHistory()`, `getTrends()` and
 * `testDataSource()` methods, which describe how to fetch data from source other than Zabbix API.
 */
export class DBConnector {
  protected datasourceId: any;
  private datasourceName: any;
  protected datasourceTypeId: any;
  // private datasourceTypeName: any;

  constructor(options) {
    this.datasourceId = options.datasourceId;
    this.datasourceName = options.datasourceName;
    this.datasourceTypeId = null;
    // this.datasourceTypeName = null;
  }

  // Resolve a datasource instance by id or name; the id is only consulted
  // when no name was given. Rejects when nothing matches.
  static loadDatasource(dsId, dsName) {
    if (!dsName && dsId !== undefined) {
      const ds = _.find(getDataSourceSrv().getList(), { id: dsId });
      if (!ds) {
        return Promise.reject(`Data Source with ID ${dsId} not found`);
      }
      dsName = ds.name;
    }
    if (dsName) {
      return getDataSourceSrv().get(dsName);
    } else {
      return Promise.reject(`Data Source name should be specified`);
    }
  }

  // Load the backing datasource and cache its identifiers on this connector.
  loadDBDataSource() {
    return DBConnector.loadDatasource(this.datasourceId, this.datasourceName).then((ds) => {
      this.datasourceTypeId = ds.meta.id;
      // this.datasourceTypeName = ds.meta.name;
      if (!this.datasourceName) {
        this.datasourceName = ds.name;
      }
      if (!this.datasourceId) {
        this.datasourceId = ds.id;
      }
      return ds;
    });
  }
}
|
||||
|
||||
// Aggregate default export mirroring the named exports of this module.
export default {
  DBConnector,
  DEFAULT_QUERY_LIMIT,
  HISTORY_TO_TABLE_MAP,
  TREND_TO_TABLE_MAP,
  consolidateByFunc,
  consolidateByTrendColumns,
};
|
||||
165
src/datasource/zabbix/connectors/influxdb/influxdbConnector.ts
Normal file
165
src/datasource/zabbix/connectors/influxdb/influxdbConnector.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
dataFrameToJSON,
|
||||
Field,
|
||||
FieldType,
|
||||
MutableDataFrame,
|
||||
TIME_SERIES_TIME_FIELD_NAME,
|
||||
} from '@grafana/data';
|
||||
import _ from 'lodash';
|
||||
import { compactQuery } from '../../../utils';
|
||||
import { consolidateByTrendColumns, DBConnector, HISTORY_TO_TABLE_MAP } from '../dbConnector';
|
||||
|
||||
// consolidateBy() option -> InfluxQL aggregation function name.
const consolidateByFunc = {
  avg: 'MEAN',
  min: 'MIN',
  max: 'MAX',
  sum: 'SUM',
  count: 'COUNT',
};
|
||||
|
||||
export class InfluxDBConnector extends DBConnector {
|
||||
private retentionPolicy: any;
|
||||
private influxDS: any;
|
||||
|
||||
  constructor(options) {
    super(options);
    this.retentionPolicy = options.retentionPolicy;
    // NOTE(review): influxDS is resolved asynchronously — a query issued
    // before this promise settles would see this.influxDS undefined; confirm
    // callers wait for initialization.
    super.loadDBDataSource().then((ds) => {
      this.influxDS = ds;
      return ds;
    });
  }
|
||||
|
||||
  /**
   * Try to invoke test query for one of Zabbix database tables.
   * Normalizes InfluxDB's error status into a rejected promise.
   */
  testDataSource() {
    return this.influxDS.testDatasource().then((result) => {
      if (result.status && result.status === 'error') {
        return Promise.reject({
          data: {
            message: `InfluxDB connection error: ${result.message}`,
          },
        });
      }
      return result;
    });
  }
|
||||
|
||||
  // Fetch history for the given items between timeFrom/timeTill (seconds),
  // grouped to intervalMs buckets with the requested consolidation function
  // (defaults to 'avg').
  getHistory(items, timeFrom, timeTill, options) {
    const { intervalMs, retentionPolicy } = options;
    let { consolidateBy } = options;
    const intervalSec = Math.ceil(intervalMs / 1000);

    const range = { timeFrom, timeTill };
    consolidateBy = consolidateBy || 'avg';

    // Group items by value type and perform request for each value type
    const grouped_items = _.groupBy(items, 'value_type');
    const promises = _.map(grouped_items, (items, value_type) => {
      const itemids = _.map(items, 'itemid');
      const table = HISTORY_TO_TABLE_MAP[value_type];
      const query = this.buildHistoryQuery(itemids, table, range, intervalSec, consolidateBy, retentionPolicy);
      return this.invokeInfluxDBQuery(query);
    });

    // Flatten the per-value-type results into a single response.
    return Promise.all(promises)
      .then(_.flatten)
      .then((results) => {
        return handleInfluxHistoryResponse(results);
      });
  }
|
||||
|
||||
getTrends(items, timeFrom, timeTill, options) {
|
||||
options.retentionPolicy = this.retentionPolicy;
|
||||
return this.getHistory(items, timeFrom, timeTill, options);
|
||||
}
|
||||
|
||||
buildHistoryQuery(itemids, table, range, intervalSec, aggFunction, retentionPolicy) {
|
||||
const { timeFrom, timeTill } = range;
|
||||
const measurement = retentionPolicy ? `"${retentionPolicy}"."${table}"` : `"${table}"`;
|
||||
let value = 'value';
|
||||
if (retentionPolicy) {
|
||||
value = consolidateByTrendColumns[aggFunction] || 'value_avg';
|
||||
}
|
||||
const aggregation = consolidateByFunc[aggFunction] || aggFunction;
|
||||
const where_clause = this.buildWhereClause(itemids);
|
||||
const query = `SELECT ${aggregation}("${value}")
|
||||
FROM ${measurement}
|
||||
WHERE ${where_clause}
|
||||
AND "time" >= ${timeFrom}s
|
||||
AND "time" <= ${timeTill}s
|
||||
GROUP BY time(${intervalSec}s), "itemid" fill(none)`;
|
||||
return compactQuery(query);
|
||||
}
|
||||
|
||||
buildWhereClause(itemids) {
|
||||
const itemidsWhere = itemids.map((itemid) => `"itemid" = '${itemid}'`).join(' OR ');
|
||||
return `(${itemidsWhere})`;
|
||||
}
|
||||
|
||||
async invokeInfluxDBQuery(query) {
|
||||
const data = await this.influxDS._seriesQuery(query).toPromise();
|
||||
return data?.results || [];
|
||||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function handleInfluxHistoryResponse(results) {
|
||||
if (!results) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const frames: DataFrame[] = [];
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const result = results[i];
|
||||
|
||||
if (result.error) {
|
||||
const error = `InfluxDB error: ${result.error}`;
|
||||
return Promise.reject(new Error(error));
|
||||
}
|
||||
|
||||
if (!result || !result.series) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const influxSeriesList = results[i].series;
|
||||
|
||||
for (let y = 0; y < influxSeriesList.length; y++) {
|
||||
const influxSeries = influxSeriesList[y];
|
||||
const tsBuffer = [];
|
||||
const valuesBuffer = [];
|
||||
if (influxSeries.values) {
|
||||
for (i = 0; i < influxSeries.values.length; i++) {
|
||||
tsBuffer.push(influxSeries.values[i][0]);
|
||||
valuesBuffer.push(influxSeries.values[i][1]);
|
||||
}
|
||||
}
|
||||
const timeFiled: Field<number> = {
|
||||
name: TIME_SERIES_TIME_FIELD_NAME,
|
||||
type: FieldType.time,
|
||||
config: {},
|
||||
values: new ArrayVector(tsBuffer),
|
||||
};
|
||||
|
||||
const valueFiled: Field<number | null> = {
|
||||
name: influxSeries?.tags?.itemid,
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
values: new ArrayVector(valuesBuffer),
|
||||
};
|
||||
|
||||
frames.push(
|
||||
new MutableDataFrame({
|
||||
name: influxSeries?.tags?.itemid,
|
||||
fields: [timeFiled, valueFiled],
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return frames.map((f) => dataFrameToJSON(f));
|
||||
}
|
||||
46
src/datasource/zabbix/connectors/sql/mysql.ts
Normal file
46
src/datasource/zabbix/connectors/sql/mysql.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* MySQL queries
|
||||
*/
|
||||
|
||||
function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
|
||||
const time_expression = `clock DIV ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT CAST(itemid AS CHAR) AS metric, ${time_expression} AS time_sec, ${aggFunction}(value) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY ${time_expression}, metric
|
||||
ORDER BY time_sec ASC
|
||||
`;
|
||||
}
|
||||
|
||||
function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
|
||||
const time_expression = `clock DIV ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT CAST(itemid AS CHAR) AS metric, ${time_expression} AS time_sec, ${aggFunction}(${valueColumn}) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY ${time_expression}, metric
|
||||
ORDER BY time_sec ASC
|
||||
`;
|
||||
}
|
||||
|
||||
function testQuery() {
|
||||
return `SELECT CAST(itemid AS CHAR) AS metric, clock AS time_sec, value_avg AS value
|
||||
FROM trends_uint LIMIT 1`;
|
||||
}
|
||||
|
||||
const mysql = {
|
||||
historyQuery,
|
||||
trendsQuery,
|
||||
testQuery,
|
||||
};
|
||||
|
||||
export default mysql;
|
||||
52
src/datasource/zabbix/connectors/sql/postgres.ts
Normal file
52
src/datasource/zabbix/connectors/sql/postgres.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
/**
|
||||
* Postgres queries
|
||||
*/
|
||||
|
||||
// to_char() format for itemid: FM strips padding, enough 9s for a 64-bit id.
const ITEMID_FORMAT = 'FM99999999999999999999';
|
||||
|
||||
function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
|
||||
const time_expression = `clock / ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(value) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY 1, 2
|
||||
ORDER BY time ASC
|
||||
`;
|
||||
}
|
||||
|
||||
function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
|
||||
const time_expression = `clock / ${intervalSec} * ${intervalSec}`;
|
||||
return `
|
||||
SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(${valueColumn}) AS value
|
||||
FROM ${table}
|
||||
WHERE itemid IN (${itemids})
|
||||
AND clock
|
||||
> ${timeFrom}
|
||||
AND clock
|
||||
< ${timeTill}
|
||||
GROUP BY 1, 2
|
||||
ORDER BY time ASC
|
||||
`;
|
||||
}
|
||||
|
||||
const TEST_QUERY = `
|
||||
SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, clock AS time, value_avg AS value
|
||||
FROM trends_uint LIMIT 1
|
||||
`;
|
||||
|
||||
function testQuery() {
|
||||
return TEST_QUERY;
|
||||
}
|
||||
|
||||
const postgres = {
|
||||
historyQuery,
|
||||
trendsQuery,
|
||||
testQuery,
|
||||
};
|
||||
|
||||
export default postgres;
|
||||
140
src/datasource/zabbix/connectors/sql/sqlConnector.ts
Normal file
140
src/datasource/zabbix/connectors/sql/sqlConnector.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import _ from 'lodash';
|
||||
import { getBackendSrv } from '@grafana/runtime';
|
||||
import { compactQuery } from '../../../utils';
|
||||
import mysql from './mysql';
|
||||
import postgres from './postgres';
|
||||
import dbConnector, {
|
||||
DBConnector,
|
||||
DEFAULT_QUERY_LIMIT,
|
||||
HISTORY_TO_TABLE_MAP,
|
||||
TREND_TO_TABLE_MAP,
|
||||
} from '../dbConnector';
|
||||
|
||||
const supportedDatabases = {
|
||||
mysql: 'mysql',
|
||||
postgres: 'postgres',
|
||||
};
|
||||
|
||||
export class SQLConnector extends DBConnector {
|
||||
private limit: number;
|
||||
private sqlDialect: any;
|
||||
|
||||
constructor(options) {
|
||||
super(options);
|
||||
|
||||
this.limit = options.limit || DEFAULT_QUERY_LIMIT;
|
||||
this.sqlDialect = null;
|
||||
|
||||
super.loadDBDataSource().then(() => {
|
||||
this.loadSQLDialect();
|
||||
});
|
||||
}
|
||||
|
||||
loadSQLDialect() {
|
||||
if (this.datasourceTypeId === supportedDatabases.postgres) {
|
||||
this.sqlDialect = postgres;
|
||||
} else {
|
||||
this.sqlDialect = mysql;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to invoke test query for one of Zabbix database tables.
|
||||
*/
|
||||
testDataSource() {
|
||||
const testQuery = this.sqlDialect.testQuery();
|
||||
return this.invokeSQLQuery(testQuery);
|
||||
}
|
||||
|
||||
getHistory(items, timeFrom, timeTill, options) {
|
||||
const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);
|
||||
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid').join(', ');
|
||||
const table = HISTORY_TO_TABLE_MAP[value_type];
|
||||
let query = this.sqlDialect.historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction);
|
||||
|
||||
query = compactQuery(query);
|
||||
return this.invokeSQLQuery(query);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then((results) => {
|
||||
return _.flatten(results);
|
||||
});
|
||||
}
|
||||
|
||||
getTrends(items, timeFrom, timeTill, options) {
|
||||
const { consolidateBy } = options;
|
||||
const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);
|
||||
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid').join(', ');
|
||||
const table = TREND_TO_TABLE_MAP[value_type];
|
||||
let valueColumn = _.includes(['avg', 'min', 'max', 'sum'], consolidateBy) ? consolidateBy : 'avg';
|
||||
valueColumn = dbConnector.consolidateByTrendColumns[valueColumn];
|
||||
let query = this.sqlDialect.trendsQuery(
|
||||
itemids,
|
||||
table,
|
||||
timeFrom,
|
||||
timeTill,
|
||||
intervalSec,
|
||||
aggFunction,
|
||||
valueColumn
|
||||
);
|
||||
|
||||
query = compactQuery(query);
|
||||
return this.invokeSQLQuery(query);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then((results) => {
|
||||
return _.flatten(results);
|
||||
});
|
||||
}
|
||||
|
||||
invokeSQLQuery(query) {
|
||||
const queryDef = {
|
||||
refId: 'A',
|
||||
format: 'time_series',
|
||||
datasourceId: this.datasourceId,
|
||||
rawSql: query,
|
||||
maxDataPoints: this.limit,
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.datasourceRequest({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
queries: [queryDef],
|
||||
},
|
||||
})
|
||||
.then((response) => {
|
||||
const results = response.data.results;
|
||||
if (results['A']) {
|
||||
return results['A'].frames;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getAggFunc(timeFrom, timeTill, options) {
|
||||
const { intervalMs } = options;
|
||||
let { consolidateBy } = options;
|
||||
let intervalSec = Math.ceil(intervalMs / 1000);
|
||||
|
||||
// The interval must match the time range exactly n times, otherwise
|
||||
// the resulting first and last data points will yield invalid values in the
|
||||
// calculated average value in downsampleSeries - when using consolidateBy(avg)
|
||||
const numOfIntervals = Math.ceil((timeTill - timeFrom) / intervalSec);
|
||||
intervalSec = Math.ceil((timeTill - timeFrom) / numOfIntervals);
|
||||
|
||||
consolidateBy = consolidateBy || 'avg';
|
||||
const aggFunction = dbConnector.consolidateByFunc[consolidateBy];
|
||||
return { aggFunction, intervalSec };
|
||||
}
|
||||
52
src/datasource/zabbix/connectors/zabbix_api/types.ts
Normal file
52
src/datasource/zabbix/connectors/zabbix_api/types.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
export interface JSONRPCRequest {
|
||||
jsonrpc: '2.0' | string;
|
||||
method: string;
|
||||
id: number;
|
||||
auth?: string | null;
|
||||
params?: JSONRPCRequestParams;
|
||||
}
|
||||
|
||||
export interface JSONRPCResponse<T> {
|
||||
jsonrpc: '2.0' | string;
|
||||
id: number;
|
||||
result?: T;
|
||||
error?: JSONRPCError;
|
||||
}
|
||||
|
||||
export interface JSONRPCError {
|
||||
code?: number;
|
||||
message?: string;
|
||||
data?: string;
|
||||
}
|
||||
|
||||
export type JSONRPCRequestParams = {[key: string]: any};
|
||||
|
||||
export type HTTPMethod = 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE' | 'HEAD' | 'CONNECT' | 'OPTIONS' | 'TRACE';
|
||||
|
||||
export type GFRequestOptions = {[key: string]: any};
|
||||
|
||||
export interface ZabbixRequestResponse {
|
||||
data?: JSONRPCResponse<any>;
|
||||
}
|
||||
|
||||
export type ZabbixAPIResponse<T> = Promise<T>;
|
||||
|
||||
export type APILoginResponse = string;
|
||||
|
||||
export interface ZBXScript {
|
||||
scriptid: string;
|
||||
name?: string;
|
||||
command?: string;
|
||||
host_access?: string;
|
||||
usrgrpid?: string;
|
||||
groupid?: string;
|
||||
description?: string;
|
||||
confirmation?: string;
|
||||
type?: string;
|
||||
execute_on?: string;
|
||||
}
|
||||
|
||||
export interface APIExecuteScriptResponse {
|
||||
response: 'success' | 'failed';
|
||||
value?: string;
|
||||
}
|
||||
@@ -0,0 +1,773 @@
|
||||
import _ from 'lodash';
|
||||
import semver from 'semver';
|
||||
import kbn from 'grafana/app/core/utils/kbn';
|
||||
import * as utils from '../../../utils';
|
||||
import { MIN_SLA_INTERVAL, ZBX_ACK_ACTION_ADD_MESSAGE, ZBX_ACK_ACTION_NONE } from '../../../constants';
|
||||
import { ShowProblemTypes, ZBXProblem } from '../../../types';
|
||||
import { APIExecuteScriptResponse, JSONRPCError, ZBXScript } from './types';
|
||||
import { BackendSrvRequest, getBackendSrv } from '@grafana/runtime';
|
||||
import { rangeUtil } from '@grafana/data';
|
||||
|
||||
const DEFAULT_ZABBIX_VERSION = '3.0.0';
|
||||
|
||||
// Backward compatibility. Since Grafana 7.2 roundInterval() func was moved to @grafana/data package
|
||||
const roundInterval: (interval: number) => number = rangeUtil?.roundInterval || kbn.roundInterval || kbn.round_interval;
|
||||
|
||||
/**
|
||||
* Zabbix API Wrapper.
|
||||
* Creates Zabbix API instance with given parameters (url, credentials and other).
|
||||
* Wraps API calls and provides high-level methods.
|
||||
*/
|
||||
export class ZabbixAPIConnector {
|
||||
backendAPIUrl: string;
|
||||
requestOptions: { basicAuth: any; withCredentials: boolean };
|
||||
getTrend: (items: any, timeFrom: any, timeTill: any) => Promise<any[]>;
|
||||
version: string;
|
||||
getVersionPromise: Promise<string>;
|
||||
datasourceId: number;
|
||||
|
||||
constructor(basicAuth: any, withCredentials: boolean, datasourceId: number) {
|
||||
this.datasourceId = datasourceId;
|
||||
this.backendAPIUrl = `/api/datasources/${this.datasourceId}/resources/zabbix-api`;
|
||||
|
||||
this.requestOptions = {
|
||||
basicAuth: basicAuth,
|
||||
withCredentials: withCredentials,
|
||||
};
|
||||
|
||||
this.getTrend = this.getTrend_ZBXNEXT1193;
|
||||
//getTrend = getTrend_30;
|
||||
|
||||
this.initVersion();
|
||||
}
|
||||
|
||||
//////////////////////////
|
||||
// Core method wrappers //
|
||||
//////////////////////////
|
||||
|
||||
request(method: string, params?: any) {
|
||||
if (!this.version) {
|
||||
return this.initVersion().then(() => this.request(method, params));
|
||||
}
|
||||
|
||||
return this.backendAPIRequest(method, params);
|
||||
}
|
||||
|
||||
async backendAPIRequest(method: string, params: any = {}) {
|
||||
const requestOptions: BackendSrvRequest = {
|
||||
url: this.backendAPIUrl,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
hideFromInspector: false,
|
||||
data: {
|
||||
datasourceId: this.datasourceId,
|
||||
method,
|
||||
params,
|
||||
},
|
||||
};
|
||||
|
||||
// Set request options for basic auth
|
||||
if (this.requestOptions.basicAuth || this.requestOptions.withCredentials) {
|
||||
requestOptions.withCredentials = true;
|
||||
}
|
||||
if (this.requestOptions.basicAuth) {
|
||||
requestOptions.headers.Authorization = this.requestOptions.basicAuth;
|
||||
}
|
||||
|
||||
const response = await getBackendSrv().fetch<any>(requestOptions).toPromise();
|
||||
return response?.data?.result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Zabbix API version
|
||||
*/
|
||||
getVersion() {
|
||||
return this.backendAPIRequest('apiinfo.version');
|
||||
}
|
||||
|
||||
initVersion(): Promise<string> {
|
||||
if (!this.getVersionPromise) {
|
||||
this.getVersionPromise = Promise.resolve(
|
||||
this.getVersion().then((version) => {
|
||||
if (version) {
|
||||
console.log(`Zabbix version detected: ${version}`);
|
||||
} else {
|
||||
console.log(`Failed to detect Zabbix version, use default ${DEFAULT_ZABBIX_VERSION}`);
|
||||
}
|
||||
|
||||
this.version = version || DEFAULT_ZABBIX_VERSION;
|
||||
this.getVersionPromise = null;
|
||||
return version;
|
||||
})
|
||||
);
|
||||
}
|
||||
return this.getVersionPromise;
|
||||
}
|
||||
|
||||
isZabbix54OrHigher() {
|
||||
return semver.gte(this.version, '5.4.0');
|
||||
}
|
||||
|
||||
////////////////////////////////
|
||||
// Zabbix API method wrappers //
|
||||
////////////////////////////////
|
||||
|
||||
acknowledgeEvent(eventid: string, message: string, action?: number, severity?: number) {
|
||||
if (!action) {
|
||||
action = semver.gte(this.version, '4.0.0') ? ZBX_ACK_ACTION_ADD_MESSAGE : ZBX_ACK_ACTION_NONE;
|
||||
}
|
||||
|
||||
const params: any = {
|
||||
eventids: eventid,
|
||||
message: message,
|
||||
action: action,
|
||||
};
|
||||
|
||||
if (severity !== undefined) {
|
||||
params.severity = severity;
|
||||
}
|
||||
|
||||
return this.request('event.acknowledge', params);
|
||||
}
|
||||
|
||||
getGroups() {
|
||||
const params = {
|
||||
output: ['name', 'groupid'],
|
||||
sortfield: 'name',
|
||||
real_hosts: true,
|
||||
};
|
||||
|
||||
return this.request('hostgroup.get', params);
|
||||
}
|
||||
|
||||
getHosts(groupids) {
|
||||
const params: any = {
|
||||
output: ['hostid', 'name', 'host'],
|
||||
sortfield: 'name',
|
||||
};
|
||||
if (groupids) {
|
||||
params.groupids = groupids;
|
||||
}
|
||||
|
||||
return this.request('host.get', params);
|
||||
}
|
||||
|
||||
async getApps(hostids): Promise<any[]> {
|
||||
if (this.isZabbix54OrHigher()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const params = {
|
||||
output: 'extend',
|
||||
hostids: hostids,
|
||||
};
|
||||
|
||||
return this.request('application.get', params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Zabbix items
|
||||
* @param {[type]} hostids host ids
|
||||
* @param {[type]} appids application ids
|
||||
* @param {String} itemtype 'num' or 'text'
|
||||
* @return {[type]} array of items
|
||||
*/
|
||||
getItems(hostids, appids, itemtype) {
|
||||
const params: any = {
|
||||
output: ['itemid', 'name', 'key_', 'value_type', 'hostid', 'status', 'state', 'units', 'valuemapid', 'delay'],
|
||||
sortfield: 'name',
|
||||
webitems: true,
|
||||
filter: {},
|
||||
selectHosts: ['hostid', 'name', 'host'],
|
||||
};
|
||||
if (hostids) {
|
||||
params.hostids = hostids;
|
||||
}
|
||||
if (appids) {
|
||||
params.applicationids = appids;
|
||||
}
|
||||
if (itemtype === 'num') {
|
||||
// Return only numeric metrics
|
||||
params.filter.value_type = [0, 3];
|
||||
}
|
||||
if (itemtype === 'text') {
|
||||
// Return only text metrics
|
||||
params.filter.value_type = [1, 2, 4];
|
||||
}
|
||||
|
||||
if (this.isZabbix54OrHigher()) {
|
||||
params.selectTags = 'extend';
|
||||
}
|
||||
|
||||
return this.request('item.get', params).then(utils.expandItems);
|
||||
}
|
||||
|
||||
getItemsByIDs(itemids) {
|
||||
const params: any = {
|
||||
itemids: itemids,
|
||||
output: ['itemid', 'name', 'key_', 'value_type', 'hostid', 'status', 'state', 'units', 'valuemapid', 'delay'],
|
||||
webitems: true,
|
||||
selectHosts: ['hostid', 'name'],
|
||||
};
|
||||
|
||||
if (this.isZabbix54OrHigher()) {
|
||||
params.selectTags = 'extend';
|
||||
}
|
||||
|
||||
return this.request('item.get', params).then((items) => utils.expandItems(items));
|
||||
}
|
||||
|
||||
getMacros(hostids) {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
hostids: hostids,
|
||||
};
|
||||
|
||||
return this.request('usermacro.get', params);
|
||||
}
|
||||
|
||||
getGlobalMacros() {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
globalmacro: true,
|
||||
};
|
||||
|
||||
return this.request('usermacro.get', params);
|
||||
}
|
||||
|
||||
getLastValue(itemid) {
|
||||
const params = {
|
||||
output: ['lastvalue'],
|
||||
itemids: itemid,
|
||||
};
|
||||
return this.request('item.get', params).then((items) => (items.length ? items[0].lastvalue : null));
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform history query from Zabbix API
|
||||
*
|
||||
* @param {Array} items Array of Zabbix item objects
|
||||
* @param {Number} timeFrom Time in seconds
|
||||
* @param {Number} timeTill Time in seconds
|
||||
* @return {Array} Array of Zabbix history objects
|
||||
*/
|
||||
getHistory(items, timeFrom, timeTill) {
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid');
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
history: value_type,
|
||||
itemids: itemids,
|
||||
sortfield: 'clock',
|
||||
sortorder: 'ASC',
|
||||
time_from: timeFrom,
|
||||
};
|
||||
|
||||
// Relative queries (e.g. last hour) don't include an end time
|
||||
if (timeTill) {
|
||||
params.time_till = timeTill;
|
||||
}
|
||||
|
||||
return this.request('history.get', params);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then(_.flatten);
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform trends query from Zabbix API
|
||||
* Use trends api extension from ZBXNEXT-1193 patch.
|
||||
*
|
||||
* @param {Array} items Array of Zabbix item objects
|
||||
* @param {Number} time_from Time in seconds
|
||||
* @param {Number} time_till Time in seconds
|
||||
* @return {Array} Array of Zabbix trend objects
|
||||
*/
|
||||
getTrend_ZBXNEXT1193(items, timeFrom, timeTill) {
|
||||
// Group items by value type and perform request for each value type
|
||||
const grouped_items = _.groupBy(items, 'value_type');
|
||||
const promises = _.map(grouped_items, (items, value_type) => {
|
||||
const itemids = _.map(items, 'itemid');
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
trend: value_type,
|
||||
itemids: itemids,
|
||||
sortfield: 'clock',
|
||||
sortorder: 'ASC',
|
||||
time_from: timeFrom,
|
||||
};
|
||||
|
||||
// Relative queries (e.g. last hour) don't include an end time
|
||||
if (timeTill) {
|
||||
params.time_till = timeTill;
|
||||
}
|
||||
|
||||
return this.request('trend.get', params);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then(_.flatten);
|
||||
}
|
||||
|
||||
getTrend_30(items, time_from, time_till, value_type) {
|
||||
const self = this;
|
||||
const itemids = _.map(items, 'itemid');
|
||||
|
||||
const params: any = {
|
||||
output: ['itemid', 'clock', value_type],
|
||||
itemids: itemids,
|
||||
time_from: time_from,
|
||||
};
|
||||
|
||||
// Relative queries (e.g. last hour) don't include an end time
|
||||
if (time_till) {
|
||||
params.time_till = time_till;
|
||||
}
|
||||
|
||||
return self.request('trend.get', params);
|
||||
}
|
||||
|
||||
getITService(serviceids?) {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
serviceids: serviceids,
|
||||
};
|
||||
return this.request('service.get', params);
|
||||
}
|
||||
|
||||
getSLA(serviceids, timeRange, options) {
|
||||
const [timeFrom, timeTo] = timeRange;
|
||||
let intervals = [{ from: timeFrom, to: timeTo }];
|
||||
if (options.slaInterval === 'auto') {
|
||||
const interval = getSLAInterval(options.intervalMs);
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
} else if (options.slaInterval !== 'none') {
|
||||
const interval = utils.parseInterval(options.slaInterval) / 1000;
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
}
|
||||
|
||||
const params: any = {
|
||||
serviceids,
|
||||
intervals,
|
||||
};
|
||||
|
||||
return this.request('service.getsla', params);
|
||||
}
|
||||
|
||||
async getSLA60(serviceids, timeRange, options) {
|
||||
const [timeFrom, timeTo] = timeRange;
|
||||
let intervals = [{ from: timeFrom, to: timeTo }];
|
||||
if (options.slaInterval === 'auto') {
|
||||
const interval = getSLAInterval(options.intervalMs);
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
} else if (options.slaInterval !== 'none') {
|
||||
const interval = utils.parseInterval(options.slaInterval) / 1000;
|
||||
intervals = buildSLAIntervals(timeRange, interval);
|
||||
}
|
||||
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
serviceids,
|
||||
};
|
||||
|
||||
const slaObjects = await this.request('sla.get', params);
|
||||
if (slaObjects.length === 0) {
|
||||
return {};
|
||||
}
|
||||
const sla = slaObjects[0];
|
||||
|
||||
// const periods = intervals.map(interval => ({
|
||||
// period_from: interval.from,
|
||||
// period_to: interval.to,
|
||||
// }));
|
||||
const sliParams: any = {
|
||||
slaid: sla.slaid,
|
||||
serviceids,
|
||||
period_from: timeFrom,
|
||||
period_to: timeTo,
|
||||
periods: Math.min(intervals.length, 100),
|
||||
};
|
||||
|
||||
const sliResponse = await this.request('sla.getsli', sliParams);
|
||||
if (sliResponse.length === 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const slaLikeResponse: any = {};
|
||||
sliResponse.serviceids.forEach((serviceid) => {
|
||||
slaLikeResponse[serviceid] = {
|
||||
sla: [],
|
||||
};
|
||||
});
|
||||
sliResponse.sli.forEach((sliItem, i) => {
|
||||
sliItem.forEach((sli, j) => {
|
||||
slaLikeResponse[sliResponse.serviceids[j]].sla.push({
|
||||
downtimeTime: sli.downtime,
|
||||
okTime: sli.uptime,
|
||||
sla: sli.sli,
|
||||
from: sliResponse.periods[i].period_from,
|
||||
to: sliResponse.periods[i].period_to,
|
||||
});
|
||||
});
|
||||
});
|
||||
return slaLikeResponse;
|
||||
}
|
||||
|
||||
getProblems(groupids, hostids, applicationids, options): Promise<ZBXProblem[]> {
|
||||
const { timeFrom, timeTo, recent, severities, limit, acknowledged, tags } = options;
|
||||
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
selectAcknowledges: 'extend',
|
||||
selectSuppressionData: 'extend',
|
||||
selectTags: 'extend',
|
||||
source: '0',
|
||||
object: '0',
|
||||
sortfield: ['eventid'],
|
||||
sortorder: 'DESC',
|
||||
evaltype: '0',
|
||||
// preservekeys: '1',
|
||||
groupids,
|
||||
hostids,
|
||||
applicationids,
|
||||
recent,
|
||||
};
|
||||
|
||||
if (severities) {
|
||||
params.severities = severities;
|
||||
}
|
||||
|
||||
if (acknowledged !== undefined) {
|
||||
params.acknowledged = acknowledged;
|
||||
}
|
||||
|
||||
if (tags) {
|
||||
params.tags = tags;
|
||||
}
|
||||
|
||||
if (limit) {
|
||||
params.limit = limit;
|
||||
}
|
||||
|
||||
if (timeFrom || timeTo) {
|
||||
params.time_from = timeFrom;
|
||||
params.time_till = timeTo;
|
||||
}
|
||||
|
||||
return this.request('problem.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
getTriggersByIds(triggerids: string[]) {
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
triggerids: triggerids,
|
||||
expandDescription: true,
|
||||
expandData: true,
|
||||
expandComment: true,
|
||||
monitored: true,
|
||||
skipDependent: true,
|
||||
selectGroups: ['name', 'groupid'],
|
||||
selectHosts: ['hostid', 'name', 'host', 'maintenance_status', 'proxy_hostid'],
|
||||
selectItems: ['itemid', 'name', 'key_', 'lastvalue'],
|
||||
// selectLastEvent: 'extend',
|
||||
// selectTags: 'extend',
|
||||
preservekeys: '1',
|
||||
};
|
||||
|
||||
return this.request('trigger.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
getTriggers(groupids, hostids, applicationids, options) {
|
||||
const { showTriggers, maintenance, timeFrom, timeTo } = options;
|
||||
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
groupids: groupids,
|
||||
hostids: hostids,
|
||||
applicationids: applicationids,
|
||||
expandDescription: true,
|
||||
expandData: true,
|
||||
expandComment: true,
|
||||
monitored: true,
|
||||
skipDependent: true,
|
||||
//only_true: true,
|
||||
filter: {
|
||||
value: 1,
|
||||
},
|
||||
selectGroups: ['groupid', 'name'],
|
||||
selectHosts: ['hostid', 'name', 'host', 'maintenance_status', 'proxy_hostid'],
|
||||
selectItems: ['itemid', 'name', 'key_', 'lastvalue'],
|
||||
selectLastEvent: 'extend',
|
||||
selectTags: 'extend',
|
||||
};
|
||||
|
||||
if (showTriggers === ShowProblemTypes.Problems) {
|
||||
params.filter.value = 1;
|
||||
} else if (showTriggers === ShowProblemTypes.Recent || showTriggers === ShowProblemTypes.History) {
|
||||
params.filter.value = [0, 1];
|
||||
}
|
||||
|
||||
if (maintenance) {
|
||||
params.maintenance = true;
|
||||
}
|
||||
|
||||
if (timeFrom || timeTo) {
|
||||
params.lastChangeSince = timeFrom;
|
||||
params.lastChangeTill = timeTo;
|
||||
}
|
||||
|
||||
return this.request('trigger.get', params);
|
||||
}
|
||||
|
||||
getEvents(objectids, timeFrom, timeTo, showEvents, limit) {
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
time_from: timeFrom,
|
||||
time_till: timeTo,
|
||||
objectids: objectids,
|
||||
select_acknowledges: 'extend',
|
||||
selectHosts: 'extend',
|
||||
value: showEvents,
|
||||
};
|
||||
|
||||
if (limit) {
|
||||
params.limit = limit;
|
||||
params.sortfield = 'clock';
|
||||
params.sortorder = 'DESC';
|
||||
}
|
||||
|
||||
return this.request('event.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
getEventsHistory(groupids, hostids, applicationids, options) {
|
||||
const { timeFrom, timeTo, severities, limit, value } = options;
|
||||
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
time_from: timeFrom,
|
||||
time_till: timeTo,
|
||||
value: '1',
|
||||
source: '0',
|
||||
object: '0',
|
||||
evaltype: '0',
|
||||
sortfield: ['eventid'],
|
||||
sortorder: 'DESC',
|
||||
select_acknowledges: 'extend',
|
||||
selectTags: 'extend',
|
||||
selectSuppressionData: ['maintenanceid', 'suppress_until'],
|
||||
groupids,
|
||||
hostids,
|
||||
applicationids,
|
||||
};
|
||||
|
||||
if (limit) {
|
||||
params.limit = limit;
|
||||
}
|
||||
|
||||
if (severities) {
|
||||
params.severities = severities;
|
||||
}
|
||||
|
||||
if (value) {
|
||||
params.value = value;
|
||||
}
|
||||
|
||||
return this.request('event.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
getExtendedEventData(eventids) {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
eventids: eventids,
|
||||
preservekeys: true,
|
||||
select_acknowledges: 'extend',
|
||||
selectTags: 'extend',
|
||||
sortfield: 'clock',
|
||||
sortorder: 'DESC',
|
||||
};
|
||||
|
||||
return this.request('event.get', params);
|
||||
}
|
||||
|
||||
getEventAlerts(eventids) {
|
||||
const params = {
|
||||
eventids: eventids,
|
||||
output: ['alertid', 'eventid', 'message', 'clock', 'error'],
|
||||
selectUsers: true,
|
||||
};
|
||||
|
||||
return this.request('alert.get', params);
|
||||
}
|
||||
|
||||
getAcknowledges(eventids) {
|
||||
const params = {
|
||||
output: 'extend',
|
||||
eventids: eventids,
|
||||
preservekeys: true,
|
||||
select_acknowledges: 'extend',
|
||||
sortfield: 'clock',
|
||||
sortorder: 'DESC',
|
||||
};
|
||||
|
||||
return this.request('event.get', params).then((events) => {
|
||||
return _.filter(events, (event) => event.acknowledges.length);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Fetch triggers (`trigger.get`) related to the given items, optionally
 * limited to triggers whose state changed within [timeFrom, timeTo].
 */
getAlerts(itemids, timeFrom, timeTo) {
  const params: any = {
    output: 'extend',
    itemids: itemids,
    expandDescription: true,
    expandData: true,
    expandComment: true,
    monitored: true,
    skipDependent: true,
    //only_true: true,
    // filter: {
    //   value: 1
    // },
    selectLastEvent: 'extend',
  };

  // Constrain by last state change only when some range bound is given.
  if (timeFrom || timeTo) {
    Object.assign(params, {
      lastChangeSince: timeFrom,
      lastChangeTill: timeTo,
    });
  }

  return this.request('trigger.get', params);
}
|
||||
|
||||
/**
 * Fetch active (value=1) triggers for the given hosts/applications.
 * Supports post-filtering by acknowledge state and a count-only mode;
 * when both are requested, counting happens client-side after filtering.
 */
getHostAlerts(hostids, applicationids, options) {
  const { minSeverity, acknowledged, count, timeFrom, timeTo } = options;
  // Acknowledge filtering is requested only for the explicit 0/1 values.
  const filterByAck = acknowledged === 0 || acknowledged === 1;

  const params: any = {
    output: 'extend',
    hostids: hostids,
    min_severity: minSeverity,
    filter: { value: 1 },
    expandDescription: true,
    expandData: true,
    expandComment: true,
    monitored: true,
    skipDependent: true,
    selectLastEvent: 'extend',
    selectGroups: 'extend',
    selectHosts: ['hostid', 'host', 'name'],
  };

  // Server-side count is only usable when no ack filtering is needed.
  if (count && !filterByAck) {
    params.countOutput = true;
  }

  if (applicationids && applicationids.length) {
    params.applicationids = applicationids;
  }

  if (timeFrom || timeTo) {
    params.lastChangeSince = timeFrom;
    params.lastChangeTill = timeTo;
  }

  return this.request('trigger.get', params).then((triggers) => {
    if (!count || filterByAck) {
      triggers = filterTriggersByAcknowledge(triggers, acknowledged);
      if (count) {
        triggers = triggers.length;
      }
    }
    return triggers;
  });
}
|
||||
|
||||
/** Fetch the list of Zabbix proxies (id and host name only). */
getProxies() {
  return this.request('proxy.get', { output: ['proxyid', 'host'] });
}
|
||||
|
||||
getScripts(hostids: string[], options?: any): Promise<ZBXScript[]> {
|
||||
const params: any = {
|
||||
output: 'extend',
|
||||
hostids,
|
||||
};
|
||||
|
||||
return this.request('script.get', params).then(utils.mustArray);
|
||||
}
|
||||
|
||||
executeScript(hostid: string, scriptid: string): Promise<APIExecuteScriptResponse> {
|
||||
const params: any = {
|
||||
hostid,
|
||||
scriptid,
|
||||
};
|
||||
|
||||
return this.request('script.execute', params);
|
||||
}
|
||||
|
||||
/** Fetch all value maps together with their mapping entries. */
getValueMappings() {
  return this.request('valuemap.get', {
    output: 'extend',
    selectMappings: 'extend',
  });
}
|
||||
}
|
||||
|
||||
function filterTriggersByAcknowledge(triggers, acknowledged) {
|
||||
if (acknowledged === 0) {
|
||||
return _.filter(triggers, (trigger) => trigger.lastEvent.acknowledged === '0');
|
||||
} else if (acknowledged === 1) {
|
||||
return _.filter(triggers, (trigger) => trigger.lastEvent.acknowledged === '1');
|
||||
} else {
|
||||
return triggers;
|
||||
}
|
||||
}
|
||||
|
||||
function getSLAInterval(intervalMs) {
|
||||
// Too many intervals may cause significant load on the database, so decrease number of resulting points
|
||||
const resolutionRatio = 100;
|
||||
const interval = roundInterval(intervalMs * resolutionRatio) / 1000;
|
||||
return Math.max(interval, MIN_SLA_INTERVAL);
|
||||
}
|
||||
|
||||
function buildSLAIntervals(timeRange, interval) {
|
||||
let [timeFrom, timeTo] = timeRange;
|
||||
const intervals = [];
|
||||
|
||||
// Align time range with calculated interval
|
||||
timeFrom = Math.floor(timeFrom / interval) * interval;
|
||||
timeTo = Math.ceil(timeTo / interval) * interval;
|
||||
|
||||
for (let i = timeFrom; i <= timeTo - interval; i += interval) {
|
||||
intervals.push({
|
||||
from: i,
|
||||
to: i + interval,
|
||||
});
|
||||
}
|
||||
|
||||
return intervals;
|
||||
}
|
||||
|
||||
// Define zabbix API exception type
|
||||
export class ZabbixAPIError {
|
||||
code: number;
|
||||
name: string;
|
||||
data: string;
|
||||
message: string;
|
||||
|
||||
constructor(error: JSONRPCError) {
|
||||
this.code = error.code || null;
|
||||
this.name = error.message || '';
|
||||
this.data = error.data || '';
|
||||
this.message = 'Zabbix API Error: ' + this.name + ' ' + this.data;
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.name + ' ' + this.data;
|
||||
}
|
||||
}
|
||||
123
src/datasource/zabbix/proxy/cachingProxy.ts
Normal file
123
src/datasource/zabbix/proxy/cachingProxy.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
/**
|
||||
* This module allows to deduplicate function calls with the same params and
|
||||
* cache result of function call.
|
||||
*/
|
||||
|
||||
export class CachingProxy {
|
||||
cacheEnabled: boolean;
|
||||
ttl: number;
|
||||
cache: any;
|
||||
promises: any;
|
||||
|
||||
constructor(cacheOptions) {
|
||||
this.cacheEnabled = cacheOptions.enabled;
|
||||
this.ttl = cacheOptions.ttl || 600000; // 10 minutes by default
|
||||
|
||||
// Internal objects for data storing
|
||||
this.cache = {};
|
||||
this.promises = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that result is present in the cache and is up to date or send request otherwise.
|
||||
*/
|
||||
cacheRequest(func, funcName, funcScope) {
|
||||
return cacheRequest(func, funcName, funcScope, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap request to prevent multiple calls with same params when request is waiting for response.
|
||||
*/
|
||||
proxify(func, funcName, funcScope) {
|
||||
if (!this.promises[funcName]) {
|
||||
this.promises[funcName] = {};
|
||||
}
|
||||
const promiseKeeper = this.promises[funcName];
|
||||
return callOnce(func, promiseKeeper, funcScope);
|
||||
}
|
||||
|
||||
proxifyWithCache(func, funcName, funcScope) {
|
||||
const proxified = this.proxify(func, funcName, funcScope);
|
||||
return this.cacheRequest(proxified, funcName, funcScope);
|
||||
}
|
||||
|
||||
_isExpired(cacheObject) {
|
||||
if (cacheObject) {
|
||||
const object_age = Date.now() - cacheObject.timestamp;
|
||||
return !(cacheObject.timestamp && object_age < this.ttl);
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap request to prevent multiple calls
|
||||
* with same params when waiting for result.
|
||||
*/
|
||||
function callOnce(func, promiseKeeper, funcScope) {
|
||||
// tslint:disable-next-line: only-arrow-functions
|
||||
return function () {
|
||||
const hash = getRequestHash(arguments);
|
||||
if (!promiseKeeper[hash]) {
|
||||
promiseKeeper[hash] = Promise.resolve(
|
||||
func
|
||||
.apply(funcScope, arguments)
|
||||
.then((result) => {
|
||||
promiseKeeper[hash] = null;
|
||||
return result;
|
||||
})
|
||||
.catch((err) => {
|
||||
promiseKeeper[hash] = null;
|
||||
throw err;
|
||||
})
|
||||
);
|
||||
}
|
||||
return promiseKeeper[hash];
|
||||
};
|
||||
}
|
||||
|
||||
function cacheRequest(func, funcName, funcScope, self) {
|
||||
// tslint:disable-next-line: only-arrow-functions
|
||||
return function () {
|
||||
if (!self.cache[funcName]) {
|
||||
self.cache[funcName] = {};
|
||||
}
|
||||
|
||||
const cacheObject = self.cache[funcName];
|
||||
const hash = getRequestHash(arguments);
|
||||
if (self.cacheEnabled && !self._isExpired(cacheObject[hash])) {
|
||||
return Promise.resolve(cacheObject[hash].value);
|
||||
} else {
|
||||
return func.apply(funcScope, arguments).then((result) => {
|
||||
if (result !== undefined) {
|
||||
cacheObject[hash] = {
|
||||
value: result,
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function getRequestHash(args) {
|
||||
const argsJson = JSON.stringify(args);
|
||||
return getHash(argsJson);
|
||||
}
|
||||
|
||||
function getHash(str: string): number {
|
||||
let hash = 0,
|
||||
i,
|
||||
chr,
|
||||
len;
|
||||
if (str.length !== 0) {
|
||||
for (i = 0, len = str.length; i < len; i++) {
|
||||
chr = str.charCodeAt(i);
|
||||
hash = (hash << 5) - hash + chr;
|
||||
hash |= 0; // Convert to 32bit integer
|
||||
}
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
24
src/datasource/zabbix/types.ts
Normal file
24
src/datasource/zabbix/types.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
/**
 * Contract implemented by the Zabbix facade (see zabbix.ts).
 * Methods mirror Zabbix API operations; most parameters are intentionally
 * untyped pass-throughs to the underlying connector.
 */
export interface ZabbixConnector {
  getHistory: (items, timeFrom, timeTill) => Promise<any>;
  getTrend: (items, timeFrom, timeTill) => Promise<any>;
  getItemsByIDs: (itemids) => Promise<any>;
  getEvents: (objectids, timeFrom, timeTo, showEvents, limit?) => Promise<any>;
  getAlerts: (itemids, timeFrom?, timeTo?) => Promise<any>;
  getHostAlerts: (hostids, applicationids, options?) => Promise<any>;
  getAcknowledges: (eventids) => Promise<any>;
  getITService: (serviceids?) => Promise<any>;
  acknowledgeEvent: (eventid, message) => Promise<any>;
  getProxies: () => Promise<any>;
  getEventAlerts: (eventids) => Promise<any>;
  getExtendedEventData: (eventids) => Promise<any>;
  getMacros: (hostids: any[]) => Promise<any>;
  getVersion: () => Promise<string>;

  // Metric-picker style lookups driven by user-entered filters
  // (plain names or /regex/ strings).
  getGroups: (groupFilter?) => any;
  getHosts: (groupFilter?, hostFilter?) => any;
  getApps: (groupFilter?, hostFilter?, appFilter?) => any;
  getItems: (groupFilter?, hostFilter?, appFilter?, itemTagFilter?, itemFilter?, options?) => any;
  getSLA: (itservices, timeRange, target, options?) => any;

  // Whether the connected Zabbix version still has the applications API.
  supportsApplications: () => boolean;
}
|
||||
108
src/datasource/zabbix/zabbix.test.js
Normal file
108
src/datasource/zabbix/zabbix.test.js
Normal file
@@ -0,0 +1,108 @@
|
||||
import { Zabbix } from './zabbix';
|
||||
|
||||
// Mock @grafana/runtime (virtual: the module is provided by Grafana at
// runtime and is not resolvable in the test environment).
// FIX: the original factory object declared `getBackendSrv` twice; with
// duplicate keys the last one wins, so the first (fetch-less) definition
// was dead code and has been removed.
jest.mock(
  '@grafana/runtime',
  () => ({
    getBackendSrv: () => ({
      datasourceRequest: jest.fn().mockResolvedValue({ data: { result: '' } }),
      fetch: () => ({
        toPromise: () => jest.fn().mockResolvedValue({ data: { result: '' } }),
      }),
    }),
  }),
  { virtual: true }
);
|
||||
|
||||
// Unit tests for the Zabbix facade: proxy filtering helpers only.
// zabbixAPI.getProxies is stubbed per-suite, so no network access happens.
describe('Zabbix', () => {
  let ctx = {};
  let zabbix;
  let options = {
    url: 'http://localhost',
    username: 'zabbix',
    password: 'zabbix',
  };

  beforeEach(() => {
    ctx.options = options;
    // ctx.backendSrv = mocks.backendSrvMock;
    // ctx.datasourceSrv = mocks.datasourceSrvMock;
    // Fresh facade per test; @grafana/runtime is mocked above.
    zabbix = new Zabbix(ctx.options);
  });

  describe('When querying proxies', () => {
    beforeEach(() => {
      // Stub the API layer so getFilteredProxies works on a fixed fixture.
      zabbix.zabbixAPI.getProxies = jest.fn().mockResolvedValue([
        { host: 'proxy-foo', proxyid: '10101' },
        { host: 'proxy-bar', proxyid: '10102' },
      ]);
    });

    it("should return all proxies if filter set to /.*/", done => {
      zabbix.getFilteredProxies('/.*/').then(proxies => {
        expect(proxies).toMatchObject([{ host: 'proxy-foo' }, { host: 'proxy-bar' }]);
        done();
      });
    });

    it("should return matched proxies if regex filter used", done => {
      zabbix.getFilteredProxies('/.*-foo/').then(proxies => {
        expect(proxies).toMatchObject([{ host: 'proxy-foo' }]);
        done();
      });
    });

    it("should return matched proxies if simple filter used", done => {
      zabbix.getFilteredProxies('proxy-bar').then(proxies => {
        expect(proxies).toMatchObject([{ host: 'proxy-bar' }]);
        done();
      });
    });

    it("should return empty list for empty filter", done => {
      zabbix.getFilteredProxies('').then(proxies => {
        expect(proxies).toEqual([]);
        done();
      });
    });
  });

  describe('When filtering triggers by proxy', () => {
    beforeEach(() => {
      zabbix.zabbixAPI.getProxies = jest.fn().mockResolvedValue([
        { host: 'proxy-foo', proxyid: '10101' },
        { host: 'proxy-bar', proxyid: '10102' },
      ]);
      // Triggers reference proxies via hosts[].proxy_hostid
      // ('0' means "monitored directly", no proxy).
      ctx.triggers = [
        { triggerid: '1', hosts: [{ name: 'backend01', proxy_hostid: '0' }] },
        { triggerid: '2', hosts: [{ name: 'backend02', proxy_hostid: '0' }] },
        { triggerid: '3', hosts: [{ name: 'frontend01', proxy_hostid: '10101' }] },
        { triggerid: '4', hosts: [{ name: 'frontend02', proxy_hostid: '10101' }] },
        { triggerid: '5', hosts: [{ name: 'db01', proxy_hostid: '10102' }] },
        { triggerid: '6', hosts: [{ name: 'db02', proxy_hostid: '10102' }] },
      ];
    });

    it("should return all triggers for empty filter", done => {
      zabbix.filterTriggersByProxy(ctx.triggers, '').then(triggers => {
        const triggerids = triggers.map(t => t.triggerid);
        expect(triggerids).toEqual(['1', '2', '3', '4', '5', '6']);
        done();
      });
    });

    it("should return triggers belonging proxy matched regex filter", done => {
      zabbix.filterTriggersByProxy(ctx.triggers, '/.*-foo/').then(triggers => {
        const triggerids = triggers.map(t => t.triggerid);
        expect(triggerids).toEqual(['3', '4']);
        done();
      });
    });

    it("should return triggers belonging proxy matched name filter", done => {
      zabbix.filterTriggersByProxy(ctx.triggers, 'proxy-bar').then(triggers => {
        const triggerids = triggers.map(t => t.triggerid);
        expect(triggerids).toEqual(['5', '6']);
        done();
      });
    });
  });
});
|
||||
645
src/datasource/zabbix/zabbix.ts
Normal file
645
src/datasource/zabbix/zabbix.ts
Normal file
@@ -0,0 +1,645 @@
|
||||
import _ from 'lodash';
|
||||
// eslint-disable-next-line
|
||||
import moment from 'moment';
|
||||
import semver from 'semver';
|
||||
import * as utils from '../utils';
|
||||
import responseHandler from '../responseHandler';
|
||||
import { CachingProxy } from './proxy/cachingProxy';
|
||||
import { DBConnector } from './connectors/dbConnector';
|
||||
import { ZabbixAPIConnector } from './connectors/zabbix_api/zabbixAPIConnector';
|
||||
import { SQLConnector } from './connectors/sql/sqlConnector';
|
||||
import { InfluxDBConnector } from './connectors/influxdb/influxdbConnector';
|
||||
import { ZabbixConnector } from './types';
|
||||
import { joinTriggersWithEvents, joinTriggersWithProblems } from '../problemsHandler';
|
||||
import { ProblemDTO, ZBXItem, ZBXItemTag } from '../types';
|
||||
|
||||
// Return shape of Zabbix.getApps(): an array of applications, optionally
// extended with pass-through metadata for the "empty app filter" case,
// where the host ids are forwarded instead of application objects.
interface AppsResponse extends Array<any> {
  // True when no app filtering was applied (array content is host ids).
  appFilterEmpty?: boolean;
  // Host ids the (skipped) app lookup would have covered.
  hostids?: any[];
}
|
||||
|
||||
// API connector methods wrapped by CachingProxy.proxify(): concurrent calls
// with identical arguments share a single in-flight request.
const REQUESTS_TO_PROXYFY = [
  'getHistory',
  'getTrend',
  'getGroups',
  'getHosts',
  'getApps',
  'getItems',
  'getMacros',
  'getItemsByIDs',
  'getEvents',
  'getAlerts',
  'getHostAlerts',
  'getAcknowledges',
  'getITService',
  'getSLA',
  'getProxies',
  'getEventAlerts',
  'getExtendedEventData',
  'getProblems',
  'getEventsHistory',
  'getTriggersByIds',
  'getScripts',
  'getValueMappings',
];
|
||||
|
||||
// API connector methods whose resolved results are cached for the configured
// TTL (CachingProxy.cacheRequest) — relatively static metadata lookups.
const REQUESTS_TO_CACHE = [
  'getGroups',
  'getHosts',
  'getApps',
  'getItems',
  'getMacros',
  'getItemsByIDs',
  'getITService',
  'getProxies',
  'getValueMappings',
];
|
||||
|
||||
// API connector methods re-exposed directly on the Zabbix facade, bound to
// the connector instance (see Zabbix.bindRequests()).
const REQUESTS_TO_BIND = [
  'getHistory',
  'getTrend',
  'getMacros',
  'getItemsByIDs',
  'getEvents',
  'getAlerts',
  'getHostAlerts',
  'getAcknowledges',
  'getITService',
  'acknowledgeEvent',
  'getProxies',
  'getEventAlerts',
  'getExtendedEventData',
  'getScripts',
  'executeScript',
  'getValueMappings',
];
|
||||
|
||||
/**
 * Facade over the Zabbix API connector and an optional direct-DB connector.
 * Wires caching/deduplication around API calls and implements the
 * filter-based lookups (groups/hosts/apps/items) used by query editors.
 */
export class Zabbix implements ZabbixConnector {
  enableDirectDBConnection: boolean;
  cachingProxy: CachingProxy;
  zabbixAPI: ZabbixAPIConnector;
  // Cached/proxified DB readers — assigned asynchronously in the
  // constructor when direct DB connection is enabled.
  getHistoryDB: any;
  dbConnector: any;
  getTrendsDB: any;
  // Zabbix server version, lazily resolved by getVersion().
  version: string;

  // Connector methods re-bound onto this facade in bindRequests().
  getHistory: (items, timeFrom, timeTill) => Promise<any>;
  getTrend: (items, timeFrom, timeTill) => Promise<any>;
  getItemsByIDs: (itemids) => Promise<any>;
  getEvents: (objectids, timeFrom, timeTo, showEvents, limit?) => Promise<any>;
  getAlerts: (itemids, timeFrom?, timeTo?) => Promise<any>;
  getHostAlerts: (hostids, applicationids, options?) => Promise<any>;
  getAcknowledges: (eventids) => Promise<any>;
  getITService: (serviceids?) => Promise<any>;
  acknowledgeEvent: (eventid, message) => Promise<any>;
  getProxies: () => Promise<any>;
  getEventAlerts: (eventids) => Promise<any>;
  getExtendedEventData: (eventids) => Promise<any>;
  getMacros: (hostids: any[]) => Promise<any>;
  getValueMappings: () => Promise<any>;

  constructor(options) {
    const {
      basicAuth,
      withCredentials,
      cacheTTL,
      enableDirectDBConnection,
      dbConnectionDatasourceId,
      dbConnectionDatasourceName,
      dbConnectionRetentionPolicy,
      datasourceId,
    } = options;

    this.enableDirectDBConnection = enableDirectDBConnection;

    // Initialize caching proxy for requests
    const cacheOptions = {
      enabled: true,
      ttl: cacheTTL,
    };
    this.cachingProxy = new CachingProxy(cacheOptions);

    this.zabbixAPI = new ZabbixAPIConnector(basicAuth, withCredentials, datasourceId);

    // Order matters: dedupe first, then cache, then bind onto `this`.
    this.proxifyRequests();
    this.cacheRequests();
    this.bindRequests();

    if (enableDirectDBConnection) {
      const connectorOptions: any = { dbConnectionRetentionPolicy };
      // NOTE(review): fire-and-forget — getHistoryDB/getTrendsDB are
      // undefined until this promise resolves; callers appear to rely on
      // it completing before the first query. Confirm.
      this.initDBConnector(dbConnectionDatasourceId, dbConnectionDatasourceName, connectorOptions).then(() => {
        this.getHistoryDB = this.cachingProxy.proxifyWithCache(
          this.dbConnector.getHistory,
          'getHistory',
          this.dbConnector
        );
        this.getTrendsDB = this.cachingProxy.proxifyWithCache(
          this.dbConnector.getTrends,
          'getTrends',
          this.dbConnector
        );
      });
    }
  }

  // Resolve the configured Grafana datasource and pick the matching
  // DB connector implementation (InfluxDB vs SQL).
  initDBConnector(datasourceId, datasourceName, options) {
    return DBConnector.loadDatasource(datasourceId, datasourceName).then((ds) => {
      const connectorOptions: any = { datasourceId, datasourceName };
      if (ds.type === 'influxdb') {
        connectorOptions.retentionPolicy = options.dbConnectionRetentionPolicy;
        this.dbConnector = new InfluxDBConnector(connectorOptions);
      } else {
        this.dbConnector = new SQLConnector(connectorOptions);
      }
      return this.dbConnector;
    });
  }

  // Wrap selected API methods so identical concurrent calls are deduplicated.
  proxifyRequests() {
    for (const request of REQUESTS_TO_PROXYFY) {
      this.zabbixAPI[request] = this.cachingProxy.proxify(this.zabbixAPI[request], request, this.zabbixAPI);
    }
  }

  // Wrap selected API methods with TTL result caching.
  cacheRequests() {
    for (const request of REQUESTS_TO_CACHE) {
      this.zabbixAPI[request] = this.cachingProxy.cacheRequest(this.zabbixAPI[request], request, this.zabbixAPI);
    }
  }

  // Expose selected API methods directly on this facade.
  bindRequests() {
    for (const request of REQUESTS_TO_BIND) {
      this[request] = this.zabbixAPI[request].bind(this.zabbixAPI);
    }
  }

  /**
   * Perform test query for Zabbix API and external history DB.
   * @return {object} test result object:
   * ```
   * {
   *   zabbixVersion,
   *   dbConnectorStatus: {
   *     dsType,
   *     dsName
   *   }
   * }
   * ```
   */
  testDataSource() {
    let zabbixVersion;
    let dbConnectorStatus;
    return this.getVersion()
      .then((version) => {
        zabbixVersion = version;
        // A groups query doubles as an auth/permissions smoke test.
        return this.getAllGroups();
      })
      .then(() => {
        if (this.enableDirectDBConnection) {
          return this.dbConnector.testDataSource();
        } else {
          return Promise.resolve();
        }
      })
      .catch((error) => {
        return Promise.reject(error);
      })
      .then((testResult) => {
        // testResult is only truthy when the DB connector test ran.
        if (testResult) {
          dbConnectorStatus = {
            dsType: this.dbConnector.datasourceTypeName,
            dsName: this.dbConnector.datasourceName,
          };
        }
        return { zabbixVersion, dbConnectorStatus };
      });
  }

  // Lazily resolve and memoize the Zabbix server version.
  async getVersion() {
    if (!this.version) {
      if (this.zabbixAPI.version) {
        this.version = this.zabbixAPI.version;
      } else {
        this.version = await this.zabbixAPI.initVersion();
      }
    }
    return this.version;
  }

  // Applications API was removed in Zabbix 5.4; assume support when the
  // version is not known yet.
  supportsApplications() {
    const version = this.version || this.zabbixAPI.version;
    return version ? semver.lt(version, '5.4.0') : true;
  }

  // SLA API is available since Zabbix 6.0; assume support when unknown.
  supportSLA() {
    const version = this.version || this.zabbixAPI.version;
    return version ? semver.gte(version, '6.0.0') : true;
  }

  // NOTE: defaults to false when the version is unknown (opposite of the
  // two checks above).
  isZabbix54OrHigher() {
    const version = this.version || this.zabbixAPI.version;
    return version ? semver.gte(version, '5.4.0') : false;
  }

  // Extract the filter strings from a query target and look items up.
  getItemsFromTarget(target, options) {
    const parts = ['group', 'host', 'application', 'itemTag', 'item'];
    const filters = _.map(parts, (p) => target[p].filter);
    return this.getItems(...filters, options);
  }

  // Resolve hosts and apps for a target; an empty app filter yields [].
  getHostsFromTarget(target) {
    const parts = ['group', 'host', 'application'];
    const filters = _.map(parts, (p) => target[p].filter);
    return Promise.all([this.getHosts(...filters), this.getApps(...filters)]).then((results) => {
      const hosts = results[0];
      let apps: AppsResponse = results[1];
      if (apps.appFilterEmpty) {
        apps = [];
      }
      return [hosts, apps];
    });
  }

  getAllGroups() {
    return this.zabbixAPI.getGroups();
  }

  getGroups(groupFilter) {
    return this.getAllGroups().then((groups) => findByFilter(groups, groupFilter));
  }

  /**
   * Get list of host belonging to given groups.
   */
  getAllHosts(groupFilter) {
    return this.getGroups(groupFilter).then((groups) => {
      const groupids = _.map(groups, 'groupid');
      return this.zabbixAPI.getHosts(groupids);
    });
  }

  getHosts(groupFilter?, hostFilter?) {
    return this.getAllHosts(groupFilter).then((hosts) => findByFilter(hosts, hostFilter));
  }

  /**
   * Get list of applications belonging to given groups and hosts.
   * Returns [] on Zabbix versions without the applications API.
   */
  async getAllApps(groupFilter, hostFilter) {
    await this.getVersion();
    if (!this.supportsApplications()) {
      return [];
    }

    return this.getHosts(groupFilter, hostFilter).then((hosts) => {
      const hostids = _.map(hosts, 'hostid');
      return this.zabbixAPI.getApps(hostids);
    });
  }

  // Resolve apps matching the filter; with no filter (or no app support)
  // returns the host ids tagged with appFilterEmpty instead.
  async getApps(groupFilter?, hostFilter?, appFilter?): Promise<AppsResponse> {
    await this.getVersion();
    const skipAppFilter = !this.supportsApplications();

    return this.getHosts(groupFilter, hostFilter).then((hosts) => {
      const hostids = _.map(hosts, 'hostid');
      if (appFilter && !skipAppFilter) {
        return this.zabbixAPI.getApps(hostids).then((apps) => filterByQuery(apps, appFilter));
      } else {
        const appsResponse: AppsResponse = hostids;
        appsResponse.hostids = hostids;
        appsResponse.appFilterEmpty = true;
        return Promise.resolve(appsResponse);
      }
    });
  }

  // Collect distinct item tags (as "tag: value" strings) across all items
  // matched by the group/host filters, then apply the tag filter.
  async getItemTags(groupFilter?, hostFilter?, itemTagFilter?) {
    const items = await this.getAllItems(groupFilter, hostFilter, null, null, {});
    let tags: ZBXItemTag[] = _.flatten(
      items.map((item: ZBXItem) => {
        if (item.tags) {
          return item.tags;
        } else {
          return [];
        }
      })
    );
    // NOTE(review): `t.tag + t.value || ''` parses as (tag+value) || '' —
    // presumably intended; confirm.
    tags = _.uniqBy(tags, (t) => t.tag + t.value || '');
    const tagsStr = tags.map((t) => ({ name: utils.itemTagToString(t) }));
    return findByFilter(tagsStr, itemTagFilter);
  }

  // Fetch items either by host ids (Zabbix >= 5.4, tag-based) or by
  // application ids (older versions), then expand user macros in names.
  async getAllItems(groupFilter, hostFilter, appFilter, itemTagFilter, options: any = {}) {
    const apps = await this.getApps(groupFilter, hostFilter, appFilter);
    let items: any[];

    if (this.isZabbix54OrHigher()) {
      items = await this.zabbixAPI.getItems(apps.hostids, undefined, options.itemtype);
      if (itemTagFilter) {
        items = filterItemsByTag(items, itemTagFilter);
      }
    } else {
      if (apps.appFilterEmpty) {
        items = await this.zabbixAPI.getItems(apps.hostids, undefined, options.itemtype);
      } else {
        const appids = _.map(apps, 'applicationid');
        items = await this.zabbixAPI.getItems(undefined, appids, options.itemtype);
      }
    }

    if (!options.showDisabledItems) {
      // status '0' means "enabled" in the Zabbix item API.
      items = _.filter(items, { status: '0' });
    }

    return await this.expandUserMacro(items, false);
  }

  // Replace {$MACRO} placeholders in item names (or trigger URLs) with
  // host-level macro values. Mutates the passed items in place.
  expandUserMacro(items, isTriggerItem) {
    const hostids = getHostIds(items);
    return this.getMacros(hostids).then((macros) => {
      _.forEach(items, (item) => {
        if (utils.containsMacro(isTriggerItem ? item.url : item.name)) {
          if (isTriggerItem) {
            item.url = utils.replaceMacro(item, macros, isTriggerItem);
          } else {
            item.name = utils.replaceMacro(item, macros);
          }
        }
      });
      return items;
    });
  }

  getItems(groupFilter?, hostFilter?, appFilter?, itemTagFilter?, itemFilter?, options = {}) {
    return this.getAllItems(groupFilter, hostFilter, appFilter, itemTagFilter, options).then((items) =>
      filterByQuery(items, itemFilter)
    );
  }

  // Distinct history values for matched items; defaults to the last 2 hours
  // when no explicit range is provided.
  getItemValues(groupFilter?, hostFilter?, appFilter?, itemFilter?, options: any = {}) {
    return this.getItems(groupFilter, hostFilter, appFilter, null, itemFilter, options).then((items) => {
      let timeRange = [moment().subtract(2, 'h').unix(), moment().unix()];
      if (options.range) {
        timeRange = [options.range.from.unix(), options.range.to.unix()];
      }
      const [timeFrom, timeTo] = timeRange;

      return this.zabbixAPI.getHistory(items, timeFrom, timeTo).then((history) => {
        if (history) {
          const values = _.uniq(history.map((v) => v.value));
          return values.map((value) => ({ name: value }));
        } else {
          return [];
        }
      });
    });
  }

  getITServices(itServiceFilter) {
    return this.zabbixAPI.getITService().then((itServices) => findByFilter(itServices, itServiceFilter));
  }

  // Current problems: resolve filters to ids, fetch problems + their
  // triggers, join them, then filter the result by proxy.
  getProblems(groupFilter, hostFilter, appFilter, proxyFilter?, options?): Promise<ProblemDTO[]> {
    const promises = [
      this.getGroups(groupFilter),
      this.getHosts(groupFilter, hostFilter),
      this.getApps(groupFilter, hostFilter, appFilter),
    ];

    return Promise.all(promises)
      .then((results) => {
        const [filteredGroups, filteredHosts, filteredApps] = results;
        const query: any = {};

        if (appFilter) {
          query.applicationids = _.flatten(_.map(filteredApps, 'applicationid'));
        }
        // '/.*/' matches everything — skip host ids to keep the query small.
        if (hostFilter && hostFilter !== '/.*/') {
          query.hostids = _.map(filteredHosts, 'hostid');
        }
        if (groupFilter) {
          query.groupids = _.map(filteredGroups, 'groupid');
        }

        return query;
      })
      .then((query) => this.zabbixAPI.getProblems(query.groupids, query.hostids, query.applicationids, options))
      .then((problems) => {
        const triggerids = problems?.map((problem) => problem.objectid);
        return Promise.all([Promise.resolve(problems), this.zabbixAPI.getTriggersByIds(triggerids)]);
      })
      .then(([problems, triggers]) => joinTriggersWithProblems(problems, triggers))
      .then((triggers) => this.filterTriggersByProxy(triggers, proxyFilter));
    // .then(triggers => this.expandUserMacro.bind(this)(triggers, true));
  }

  // Historical problems: same pipeline as getProblems but backed by the
  // events-history API and joined via joinTriggersWithEvents.
  getProblemsHistory(groupFilter, hostFilter, appFilter, proxyFilter?, options?): Promise<ProblemDTO[]> {
    const { valueFromEvent } = options;

    const promises = [
      this.getGroups(groupFilter),
      this.getHosts(groupFilter, hostFilter),
      this.getApps(groupFilter, hostFilter, appFilter),
    ];

    return Promise.all(promises)
      .then((results) => {
        const [filteredGroups, filteredHosts, filteredApps] = results;
        const query: any = {};

        if (appFilter) {
          query.applicationids = _.flatten(_.map(filteredApps, 'applicationid'));
        }
        if (hostFilter) {
          query.hostids = _.map(filteredHosts, 'hostid');
        }
        if (groupFilter) {
          query.groupids = _.map(filteredGroups, 'groupid');
        }

        return query;
      })
      .then((query) => this.zabbixAPI.getEventsHistory(query.groupids, query.hostids, query.applicationids, options))
      .then((problems) => {
        const triggerids = problems?.map((problem) => problem.objectid);
        return Promise.all([Promise.resolve(problems), this.zabbixAPI.getTriggersByIds(triggerids)]);
      })
      .then(([problems, triggers]) => joinTriggersWithEvents(problems, triggers, { valueFromEvent }))
      .then((triggers) => this.filterTriggersByProxy(triggers, proxyFilter));
    // .then(triggers => this.expandUserMacro.bind(this)(triggers, true));
  }

  // Keep only triggers that have at least one host monitored by a proxy
  // matching the filter. Empty or match-all filters pass everything through.
  filterTriggersByProxy(triggers, proxyFilter) {
    return this.getFilteredProxies(proxyFilter).then((proxies) => {
      if (proxyFilter && proxyFilter !== '/.*/' && triggers) {
        const proxy_ids = proxies.map((proxy) => proxy.proxyid);
        triggers = triggers.filter((trigger) => {
          for (let i = 0; i < trigger.hosts.length; i++) {
            const host = trigger.hosts[i];
            if (proxy_ids.includes(host.proxy_hostid)) {
              return true;
            }
          }
          return false;
        });
      }
      return triggers;
    });
  }

  // Proxies don't have a `name` field — alias `host` so the generic
  // name-based filter helpers work on them.
  getFilteredProxies(proxyFilter) {
    return this.zabbixAPI.getProxies().then((proxies) => {
      proxies.forEach((proxy) => (proxy.name = proxy.host));
      return findByFilter(proxies, proxyFilter);
    });
  }

  // History as time series: direct DB path when enabled, API path otherwise.
  getHistoryTS(items, timeRange, request) {
    const [timeFrom, timeTo] = timeRange;
    if (this.enableDirectDBConnection) {
      return this.getHistoryDB(items, timeFrom, timeTo, request).then((history) =>
        responseHandler.dataResponseToTimeSeries(history, items, request)
      );
    } else {
      return this.zabbixAPI
        .getHistory(items, timeFrom, timeTo)
        .then((history) => responseHandler.handleHistory(history, items));
    }
  }

  // Trends as time series: mirrors getHistoryTS, plus consolidation and
  // sorting of API results.
  getTrends(items, timeRange, request) {
    const [timeFrom, timeTo] = timeRange;
    if (this.enableDirectDBConnection) {
      return this.getTrendsDB(items, timeFrom, timeTo, request).then((history) =>
        responseHandler.dataResponseToTimeSeries(history, items, request)
      );
    } else {
      const valueType = request.consolidateBy || request.valueType;
      return this.zabbixAPI
        .getTrend(items, timeFrom, timeTo)
        .then((history) => responseHandler.handleTrends(history, items, valueType))
        .then(responseHandler.sortTimeseries); // Sort trend data, issue #202
    }
  }

  // Text-type item history, formatted as table or plain text per target.
  getHistoryText(items, timeRange, target) {
    const [timeFrom, timeTo] = timeRange;
    if (items.length) {
      return this.zabbixAPI.getHistory(items, timeFrom, timeTo).then((history) => {
        if (target.resultFormat === 'table') {
          return responseHandler.handleHistoryAsTable(history, items, target);
        } else {
          return responseHandler.handleText(history, items, target);
        }
      });
    } else {
      return Promise.resolve([]);
    }
  }

  // SLA per IT service; uses the 6.0+ SLA API when available, the legacy
  // service.getsla otherwise. Response handling is identical for both.
  async getSLA(itservices, timeRange, target, options) {
    const itServiceIds = _.map(itservices, 'serviceid');
    if (this.supportSLA()) {
      const slaResponse = await this.zabbixAPI.getSLA60(itServiceIds, timeRange, options);
      return _.map(itServiceIds, (serviceid) => {
        const itservice = _.find(itservices, { serviceid: serviceid });
        return responseHandler.handleSLAResponse(itservice, target.slaProperty, slaResponse);
      });
    }
    const slaResponse = await this.zabbixAPI.getSLA(itServiceIds, timeRange, options);
    return _.map(itServiceIds, (serviceid) => {
      const itservice = _.find(itservices, { serviceid: serviceid });
      return responseHandler.handleSLAResponse(itservice, target.slaProperty, slaResponse);
    });
  }
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Find group, host, app or item by given name.
|
||||
* @param list list of groups, apps or other
|
||||
* @param name visible name
|
||||
* @return array with finded element or empty array
|
||||
*/
|
||||
function findByName(list, name) {
|
||||
const finded = _.find(list, { name: name });
|
||||
if (finded) {
|
||||
return [finded];
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Different hosts can contains applications and items with same name.
|
||||
* For this reason use _.filter, which return all elements instead _.find,
|
||||
* which return only first finded.
|
||||
* @param {[type]} list list of elements
|
||||
* @param {[type]} name app name
|
||||
* @return {[type]} array with finded element or empty array
|
||||
*/
|
||||
function filterByName(list, name) {
|
||||
const finded = _.filter(list, { name: name });
|
||||
if (finded) {
|
||||
return finded;
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function filterByRegex(list, regex) {
|
||||
const filterPattern = utils.buildRegex(regex);
|
||||
return _.filter(list, (zbx_obj) => {
|
||||
return filterPattern.test(zbx_obj.name);
|
||||
});
|
||||
}
|
||||
|
||||
function findByFilter(list, filter) {
|
||||
if (utils.isRegex(filter)) {
|
||||
return filterByRegex(list, filter);
|
||||
} else {
|
||||
return findByName(list, filter);
|
||||
}
|
||||
}
|
||||
|
||||
function filterByQuery(list, filter) {
|
||||
if (utils.isRegex(filter)) {
|
||||
return filterByRegex(list, filter);
|
||||
} else {
|
||||
return filterByName(list, filter);
|
||||
}
|
||||
}
|
||||
|
||||
function getHostIds(items) {
|
||||
const hostIds = _.map(items, (item) => {
|
||||
return _.map(item.hosts, 'hostid');
|
||||
});
|
||||
return _.uniq(_.flatten(hostIds));
|
||||
}
|
||||
|
||||
function filterItemsByTag(items: any[], itemTagFilter: string) {
|
||||
if (utils.isRegex(itemTagFilter)) {
|
||||
const filterPattern = utils.buildRegex(itemTagFilter);
|
||||
return items.filter((item) => {
|
||||
if (item.tags) {
|
||||
const tags: string[] = item.tags.map((t) => utils.itemTagToString(t));
|
||||
return tags.some((tag) => {
|
||||
return filterPattern.test(tag);
|
||||
});
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
return items.filter((item) => {
|
||||
if (item.tags) {
|
||||
const tags: string[] = item.tags.map((t) => utils.itemTagToString(t));
|
||||
return tags.includes(itemTagFilter);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user