Data queries (alerting)
@@ -3,10 +3,8 @@ package main
import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "net/http"
    "os"
    "time"

    simplejson "github.com/bitly/go-simplejson"
    "github.com/grafana/grafana_plugin_model/go/datasource"
@@ -15,6 +13,7 @@ import (

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/backend/log"
    "github.com/grafana/grafana-plugin-sdk-go/data"
)

// ZabbixDatasource implements the Grafana backend interface and forwards queries to the ZabbixDatasourceInstance
@@ -29,27 +28,6 @@ type ZabbixDatasource struct {
    logger log.Logger
}

func NewDatasource(logger log.Logger, mux *http.ServeMux) *ZabbixDatasource {
    variableName := "GFX_ZABBIX_DATA_PATH"
    path, exist := os.LookupEnv(variableName)
    if !exist {
        logger.Error("could not read environment variable", variableName)
    } else {
        logger.Debug("environment variable for storage found", "variable", variableName, "value", path)
    }

    ds := &ZabbixDatasource{
        logger: logger,
        datasourceCache: NewCache(10*time.Minute, 10*time.Minute),
    }

    mux.HandleFunc("/", ds.rootHandler)
    mux.HandleFunc("/zabbix-api", ds.zabbixAPIHandler)
    // mux.Handle("/scenarios", getScenariosHandler(logger))

    return ds
}

// CheckHealth checks if the plugin is running properly
func (ds *ZabbixDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
    res := &backend.CheckHealthResult{}
@@ -67,11 +45,34 @@ func (ds *ZabbixDatasource) CheckHealth(ctx context.Context, req *backend.CheckH
    return res, nil
}

func (gds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
func (ds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
    qdr := backend.NewQueryDataResponse()

    zabbixDS, err := ds.GetDatasource(req.PluginContext)
    if err != nil {
        return nil, err
    }

    for _, q := range req.Queries {
        res := backend.DataResponse{}
        query, err := ReadQuery(q)
        ds.logger.Debug("DS query", "query", q)
        ds.logger.Debug("DS query parsed", "query", query)
        if err != nil {
            res.Error = err
        } else if len(query.Functions) > 0 {
            res.Error = errors.New("Zabbix queries with functions are not supported")
        } else if query.Mode != 0 {
            res.Error = errors.New("Non-metrics queries are not supported")
        } else {
            frame, err := zabbixDS.queryNumericItems(ctx, &query)
            ds.logger.Debug("DS got frame", "frame", frame)
            if err != nil {
                res.Error = err
            } else {
                res.Frames = []*data.Frame{frame}
            }
        }
        qdr.Responses[q.RefID] = res
    }

@@ -120,8 +121,9 @@ func (ds *ZabbixDatasource) NewZabbixDatasource(dsInfo *backend.DataSourceInstan
// }

// GetDatasource Returns cached datasource or creates new one
func (ds *ZabbixDatasource) GetDatasource(orgID int64, dsInfo *backend.DataSourceInstanceSettings) (*ZabbixDatasourceInstance, error) {
    dsInfoHash := HashDatasourceInfo(dsInfo)
func (ds *ZabbixDatasource) GetDatasource(pluginContext backend.PluginContext) (*ZabbixDatasourceInstance, error) {
    dsSettings := pluginContext.DataSourceInstanceSettings
    dsInfoHash := HashDatasourceInfo(dsSettings)

    if cachedData, ok := ds.datasourceCache.Get(dsInfoHash); ok {
        if cachedDS, ok := cachedData.(*ZabbixDatasourceInstance); ok {
@@ -129,9 +131,9 @@ func (ds *ZabbixDatasource) GetDatasource(orgID int64, dsInfo *backend.DataSourc
        }
    }

    ds.logger.Debug(fmt.Sprintf("Datasource cache miss (Org %d Id %d '%s' %s)", orgID, dsInfo.ID, dsInfo.Name, dsInfoHash))
    ds.logger.Debug(fmt.Sprintf("Datasource cache miss (Org %d Id %d '%s' %s)", pluginContext.OrgID, dsSettings.ID, dsSettings.Name, dsInfoHash))

    dsInstance, err := ds.NewZabbixDatasource(dsInfo)
    dsInstance, err := ds.NewZabbixDatasource(pluginContext.DataSourceInstanceSettings)
    if err != nil {
        return nil, err
    }
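Not part of the commit: a minimal sketch of how the cache-miss branch above is typically completed, by building a new instance and storing it under the settings hash so later requests reuse it. The Set method and its signature are assumptions about the NewCache helper, not taken from this diff.

// Illustrative sketch only; datasourceCache.Set is an assumed API.
func (ds *ZabbixDatasource) getOrCreateInstance(pluginContext backend.PluginContext) (*ZabbixDatasourceInstance, error) {
    dsSettings := pluginContext.DataSourceInstanceSettings
    hash := HashDatasourceInfo(dsSettings)

    // Reuse an instance created earlier for the same settings.
    if cached, ok := ds.datasourceCache.Get(hash); ok {
        if instance, ok := cached.(*ZabbixDatasourceInstance); ok {
            return instance, nil
        }
    }

    // Cache miss: create the instance and remember it for the next request.
    instance, err := ds.NewZabbixDatasource(dsSettings)
    if err != nil {
        return nil, err
    }
    ds.datasourceCache.Set(hash, instance) // assumed cache method
    return instance, nil
}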

@@ -3,8 +3,80 @@ package main
import (
    "encoding/json"
    "fmt"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
)

type ZabbixAPIResourceRequest struct {
    DatasourceId int64 `json:"datasourceId"`
    Method string `json:"method"`
    Params map[string]interface{} `json:"params,omitempty"`
}

type ZabbixAPIRequest struct {
    Method string `json:"method"`
    Params map[string]interface{} `json:"params,omitempty"`
}

type ZabbixAPIResourceResponse struct {
    Result interface{} `json:"result,omitempty"`
}

func (r *ZabbixAPIRequest) String() string {
    jsonRequest, _ := json.Marshal(r.Params)
    return r.Method + string(jsonRequest)
}

// QueryModel model
type QueryModel struct {
    Mode int64 `json:"mode"`
    Group QueryFilter `json:"group"`
    Host QueryFilter `json:"host"`
    Application QueryFilter `json:"application"`
    Item QueryFilter `json:"item"`
    Functions []QueryFunction `json:"functions,omitempty"`
    Options QueryOptions `json:"options"`

    // Direct from the gRPC interfaces
    TimeRange backend.TimeRange `json:"-"`
}

// QueryFilter model
type QueryFilter struct {
    Filter string `json:"filter"`
}

// QueryOptions model
type QueryOptions struct {
    ShowDisabledItems bool `json:"showDisabledItems"`
}

// QueryFunction model
type QueryFunction struct {
    Def QueryFunctionDef `json:"def"`
    Params []string `json:"params"`
    Text string `json:"text"`
}

// QueryFunctionDef model
type QueryFunctionDef struct {
    Name string `json:"name"`
    Category string `json:"category"`
}

// ReadQuery reads and validates the query model from a backend.DataQuery
func ReadQuery(query backend.DataQuery) (QueryModel, error) {
    model := QueryModel{}
    if err := json.Unmarshal(query.JSON, &model); err != nil {
        return model, fmt.Errorf("could not read query: %w", err)
    }

    model.TimeRange = query.TimeRange
    return model, nil
}
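Not part of the commit: a sketch of how a frontend query payload maps onto QueryModel via ReadQuery. The JSON field names come from the struct tags above; the concrete filter values and time range are invented for illustration.

// Illustrative only: parse a hand-written query payload.
raw := json.RawMessage(`{
    "mode": 0,
    "group": {"filter": "Linux servers"},
    "host": {"filter": "/web-.*/"},
    "application": {"filter": ""},
    "item": {"filter": "CPU user time"},
    "options": {"showDisabledItems": false}
}`)

q := backend.DataQuery{
    RefID:     "A",
    JSON:      raw,
    TimeRange: backend.TimeRange{From: time.Now().Add(-6 * time.Hour), To: time.Now()},
}

model, err := ReadQuery(q)
// model.Group.Filter == "Linux servers", model.Item.Filter == "CPU user time",
// model.TimeRange is copied from q.TimeRange; err is nil for valid JSON.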

// Old models

type connectionTestResponse struct {
    ZabbixVersion string `json:"zabbixVersion"`
    DbConnectorStatus *dbConnectionStatus `json:"dbConnectorStatus"`
@@ -103,23 +175,3 @@ type ZabbixAPIParamsLegacy struct {
    TimeFrom int64 `json:"time_from,omitempty"`
    TimeTill int64 `json:"time_till,omitempty"`
}

type ZabbixAPIResourceRequest struct {
    DatasourceId int64 `json:"datasourceId"`
    Method string `json:"method"`
    Params map[string]interface{} `json:"params,omitempty"`
}

type ZabbixAPIRequest struct {
    Method string `json:"method"`
    Params map[string]interface{} `json:"params,omitempty"`
}

type ZabbixAPIResourceResponse struct {
    Result interface{} `json:"result,omitempty"`
}

func (r *ZabbixAPIRequest) String() string {
    jsonRequest, _ := json.Marshal(r.Params)
    return r.Method + string(jsonRequest)
}

@@ -2,6 +2,8 @@ package main

import (
    "net/http"
    "os"
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/backend/log"
@@ -15,7 +17,7 @@ func main() {

    pluginLogger := log.New()
    mux := http.NewServeMux()
    ds := NewDatasource(pluginLogger, mux)
    ds := Init(pluginLogger, mux)
    httpResourceHandler := httpadapter.New(mux)

    pluginLogger.Debug("Starting Zabbix backend datasource")
@@ -28,22 +30,25 @@ func main() {
    if err != nil {
        pluginLogger.Error(err.Error())
    }

    // plugin.Serve(&plugin.ServeConfig{

    // HandshakeConfig: plugin.HandshakeConfig{
    // ProtocolVersion: 1,
    // MagicCookieKey: "grafana_plugin_type",
    // MagicCookieValue: "datasource",
    // },
    // Plugins: map[string]plugin.Plugin{
    // "zabbix-backend-datasource": &datasource.DatasourcePluginImpl{Plugin: &ZabbixPlugin{
    // datasourceCache: NewCache(10*time.Minute, 10*time.Minute),
    // logger: pluginLogger,
    // }},
    // },

    // // A non-nil value here enables gRPC serving for this plugin...
    // GRPCServer: plugin.DefaultGRPCServer,
    // })
}

func Init(logger log.Logger, mux *http.ServeMux) *ZabbixDatasource {
    variableName := "GFX_ZABBIX_DATA_PATH"
    path, exist := os.LookupEnv(variableName)
    if !exist {
        logger.Error("could not read environment variable", variableName)
    } else {
        logger.Debug("environment variable for storage found", "variable", variableName, "value", path)
    }

    ds := &ZabbixDatasource{
        logger: logger,
        datasourceCache: NewCache(10*time.Minute, 10*time.Minute),
    }

    mux.HandleFunc("/", ds.rootHandler)
    mux.HandleFunc("/zabbix-api", ds.zabbixAPIHandler)
    // mux.Handle("/scenarios", getScenariosHandler(logger))

    return ds
}

@@ -39,7 +39,7 @@ func (ds *ZabbixDatasource) zabbixAPIHandler(rw http.ResponseWriter, req *http.R

    pluginCxt := httpadapter.PluginConfigFromContext(req.Context())

    dsInstance, err := ds.GetDatasource(pluginCxt.OrgID, pluginCxt.DataSourceInstanceSettings)
    dsInstance, err := ds.GetDatasource(pluginCxt)
    ds.logger.Debug("Data source found", "ds", dsInstance.dsInfo.Name)

    ds.logger.Debug("Invoke Zabbix API call", "ds", pluginCxt.DataSourceInstanceSettings.Name, "method", reqData.Method)

@@ -1,6 +1,12 @@
package zabbix

import (
    "fmt"
    "strings"
)

type Items []Item

type Item struct {
    ID string `json:"itemid,omitempty"`
    Key string `json:"key_,omitempty"`
@@ -11,12 +17,65 @@ type Item struct {
    Status string `json:"status,omitempty"`
    State string `json:"state,omitempty"`
}

func (item *Item) ExpandItem() string {
    name := item.Name
    key := item.Key

    if strings.Index(key, "[") == -1 {
        return name
    }

    keyRunes := []rune(item.Key)
    keyParamsStr := string(keyRunes[strings.Index(key, "[")+1 : strings.LastIndex(key, "]")])
    keyParams := splitKeyParams(keyParamsStr)

    for i := len(keyParams); i >= 1; i-- {
        name = strings.ReplaceAll(name, fmt.Sprintf("$%v", i), keyParams[i-1])
    }

    return name
}

func splitKeyParams(paramStr string) []string {
    paramRunes := []rune(paramStr)
    params := []string{}
    quoted := false
    inArray := false
    splitSymbol := ","
    param := ""

    for _, r := range paramRunes {
        symbol := string(r)
        if symbol == `"` && inArray {
            param += symbol
        } else if symbol == `"` && quoted {
            quoted = false
        } else if symbol == `"` && !quoted {
            quoted = true
        } else if symbol == "[" && !quoted {
            inArray = true
        } else if symbol == "]" && !quoted {
            inArray = false
        } else if symbol == splitSymbol && !quoted && !inArray {
            params = append(params, param)
            param = ""
        } else {
            param += symbol
        }
    }

    params = append(params, param)
    return params
}
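Not part of the commit: an illustrative use of ExpandItem. Positional macros $1…$N in the item name are replaced by the corresponding key parameters parsed out by splitKeyParams; the key and name below are invented.

// Illustrative example, in package zabbix.
func ExampleExpandItem() {
    item := Item{
        Name: "CPU $2 time",
        Key:  "system.cpu.util[,user,avg1]",
    }
    // splitKeyParams(",user,avg1") returns ["", "user", "avg1"], so "$2" expands to "user".
    fmt.Println(item.ExpandItem())
    // Output: CPU user time
}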

type ItemHost struct {
    ID string `json:"hostid,omitempty"`
    Name string `json:"name,omitempty"`
}

type Trend []TrendPoint

type TrendPoint struct {
    ItemID string `json:"itemid,omitempty"`
    Clock int64 `json:"clock,omitempty,string"`
@@ -27,6 +86,7 @@ type TrendPoint struct {
}

type History []HistoryPoint

type HistoryPoint struct {
    ItemID string `json:"itemid,omitempty"`
    Clock int64 `json:"clock,omitempty,string"`

@@ -4,12 +4,13 @@ import (
    "encoding/json"
    "errors"
    "fmt"
    "math"
    "regexp"
    "time"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
    simplejson "github.com/bitly/go-simplejson"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/data"
    "github.com/grafana/grafana_plugin_model/go/datasource"
    "golang.org/x/net/context"
)
@@ -99,28 +100,13 @@ func (ds *ZabbixDatasourceInstance) TestConnection(ctx context.Context, tsdbReq
    return BuildResponse(testResponse)
}

func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, tsdbReq *datasource.DatasourceRequest) (*datasource.DatasourceResponse, error) {
func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, query *QueryModel) (*data.Frame, error) {
    tStart := time.Now()
    jsonQueries := make([]*simplejson.Json, 0)
    for _, query := range tsdbReq.Queries {
        json, err := simplejson.NewJson([]byte(query.ModelJson))
        if err != nil {
            return nil, err
        }

        jsonQueries = append(jsonQueries, json)
    }

    if len(jsonQueries) == 0 {
        return nil, errors.New("At least one query should be provided")
    }

    firstQuery := jsonQueries[0]

    groupFilter := firstQuery.GetPath("group", "filter").MustString()
    hostFilter := firstQuery.GetPath("host", "filter").MustString()
    appFilter := firstQuery.GetPath("application", "filter").MustString()
    itemFilter := firstQuery.GetPath("item", "filter").MustString()
    groupFilter := query.Group.Filter
    hostFilter := query.Host.Filter
    appFilter := query.Application.Filter
    itemFilter := query.Item.Filter

    ds.logger.Debug("queryNumericItems",
        "func", "ds.getItems",
@@ -128,25 +114,25 @@ func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, tsdbR
        "hostFilter", hostFilter,
        "appFilter", appFilter,
        "itemFilter", itemFilter)
    items, err := ds.getItems(ctx, tsdbReq.GetDatasource(), groupFilter, hostFilter, appFilter, itemFilter, "num")
    items, err := ds.getItems(ctx, groupFilter, hostFilter, appFilter, itemFilter, "num")
    if err != nil {
        return nil, err
    }
    ds.logger.Debug("queryNumericItems", "finished", "ds.getItems", "timeElapsed", time.Now().Sub(tStart))

    metrics, err := ds.queryNumericDataForItems(ctx, tsdbReq, items, jsonQueries, isUseTrend(tsdbReq.GetTimeRange()))
    frames, err := ds.queryNumericDataForItems(ctx, query, items)
    if err != nil {
        return nil, err
    }
    ds.logger.Debug("queryNumericItems", "finished", "queryNumericDataForItems", "timeElapsed", time.Now().Sub(tStart))

    return BuildMetricsResponse(metrics)
    return frames, nil
}

func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) (zabbix.Items, error) {
func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) (zabbix.Items, error) {
    tStart := time.Now()

    hosts, err := ds.getHosts(ctx, dsInfo, groupFilter, hostFilter)
    hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
    if err != nil {
        return nil, err
    }
@@ -156,7 +142,7 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
    }
    ds.logger.Debug("getItems", "finished", "getHosts", "timeElapsed", time.Now().Sub(tStart))

    apps, err := ds.getApps(ctx, dsInfo, groupFilter, hostFilter, appFilter)
    apps, err := ds.getApps(ctx, groupFilter, hostFilter, appFilter)
    if err != nil {
        return nil, err
    }
@@ -169,10 +155,10 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
    var allItems *simplejson.Json
    if len(hostids) > 0 {
        ds.logger.Debug("getAllItems", "with", "hostFilter")
        allItems, err = ds.getAllItems(ctx, dsInfo, hostids, nil, itemType)
        allItems, err = ds.getAllItems(ctx, hostids, nil, itemType)
    } else if len(appids) > 0 {
        ds.logger.Debug("getAllItems", "with", "appFilter")
        allItems, err = ds.getAllItems(ctx, dsInfo, nil, appids, itemType)
        allItems, err = ds.getAllItems(ctx, nil, appids, itemType)
    }
    ds.logger.Debug("getItems", "finished", "getAllItems", "timeElapsed", time.Now().Sub(tStart))

@@ -199,12 +185,13 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso

    filteredItems := zabbix.Items{}
    for _, item := range items {
        itemName := item.ExpandItem()
        if item.Status == "0" {
            if re != nil {
                if re.MatchString(item.Name) {
                if re.MatchString(itemName) {
                    filteredItems = append(filteredItems, item)
                }
            } else if item.Name == itemFilter {
            } else if itemName == itemFilter {
                filteredItems = append(filteredItems, item)
            }
        }
@@ -214,8 +201,8 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
    return filteredItems, nil
}

func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string, hostFilter string, appFilter string) ([]map[string]interface{}, error) {
    hosts, err := ds.getHosts(ctx, dsInfo, groupFilter, hostFilter)
func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, groupFilter string, hostFilter string, appFilter string) ([]map[string]interface{}, error) {
    hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
    if err != nil {
        return nil, err
    }
@@ -223,7 +210,7 @@ func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, dsInfo *datasou
    for _, k := range hosts {
        hostids = append(hostids, k["hostid"].(string))
    }
    allApps, err := ds.getAllApps(ctx, dsInfo, hostids)
    allApps, err := ds.getAllApps(ctx, hostids)
    if err != nil {
        return nil, err
    }
@@ -248,8 +235,8 @@ func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, dsInfo *datasou
    return apps, nil
}

func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string, hostFilter string) ([]map[string]interface{}, error) {
    groups, err := ds.getGroups(ctx, dsInfo, groupFilter)
func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, groupFilter string, hostFilter string) ([]map[string]interface{}, error) {
    groups, err := ds.getGroups(ctx, groupFilter)
    if err != nil {
        return nil, err
    }
@@ -257,7 +244,7 @@ func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, dsInfo *dataso
    for _, k := range groups {
        groupids = append(groupids, k["groupid"].(string))
    }
    allHosts, err := ds.getAllHosts(ctx, dsInfo, groupids)
    allHosts, err := ds.getAllHosts(ctx, groupids)
    if err != nil {
        return nil, err
    }
@@ -283,8 +270,8 @@ func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, dsInfo *dataso
    return hosts, nil
}

func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string) ([]map[string]interface{}, error) {
    allGroups, err := ds.getAllGroups(ctx, dsInfo)
func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, groupFilter string) ([]map[string]interface{}, error) {
    allGroups, err := ds.getAllGroups(ctx)
    if err != nil {
        return nil, err
    }
@@ -307,15 +294,15 @@ func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, dsInfo *datas
    return groups, nil
}

func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, dsInfo *datasource.DatasourceInfo, hostids []string, appids []string, itemtype string) (*simplejson.Json, error) {
func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, hostids []string, appids []string, itemtype string) (*simplejson.Json, error) {
    params := ZabbixAPIParams{
        "output": &zabbixParamOutput{Fields: []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state"}},
        "sortField": "name",
        "webItems": true,
        "output": []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state"},
        "sortfield": "name",
        "webitems": true,
        "filter": map[string]interface{}{},
        "selectHosts": []string{"hostid", "name"},
        "hostIDs": hostids,
        "appIDs": appids,
        "hostids": hostids,
        "applicationids": appids,
    }

    filter := params["filter"].(map[string]interface{})
@@ -328,81 +315,80 @@ func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, dsInfo *dat
    return ds.ZabbixRequest(ctx, "item.get", params)
}

func (ds *ZabbixDatasourceInstance) getAllApps(ctx context.Context, dsInfo *datasource.DatasourceInfo, hostids []string) (*simplejson.Json, error) {
    params := ZabbixAPIParams{"output": &zabbixParamOutput{Mode: "extend"}, "hostIDs": hostids}
func (ds *ZabbixDatasourceInstance) getAllApps(ctx context.Context, hostids []string) (*simplejson.Json, error) {
    params := ZabbixAPIParams{
        "output": "extend",
        "hostids": hostids,
    }

    return ds.ZabbixRequest(ctx, "application.get", params)
}

func (ds *ZabbixDatasourceInstance) getAllHosts(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupids []string) (*simplejson.Json, error) {
    params := ZabbixAPIParams{"output": &zabbixParamOutput{Fields: []string{"name", "host"}}, "sortField": "name", "groupIDs": groupids}
func (ds *ZabbixDatasourceInstance) getAllHosts(ctx context.Context, groupids []string) (*simplejson.Json, error) {
    params := ZabbixAPIParams{
        "output": []string{"name", "host"},
        "sortfield": "name",
        "groupids": groupids,
    }

    return ds.ZabbixRequest(ctx, "host.get", params)
}

func (ds *ZabbixDatasourceInstance) getAllGroups(ctx context.Context, dsInfo *datasource.DatasourceInfo) (*simplejson.Json, error) {
    params := ZabbixAPIParams{"output": &zabbixParamOutput{Fields: []string{"name"}}, "sortField": "name", "realHosts": true}
func (ds *ZabbixDatasourceInstance) getAllGroups(ctx context.Context) (*simplejson.Json, error) {
    params := ZabbixAPIParams{
        "output": []string{"name"},
        "sortfield": "name",
        "real_hosts": true,
    }

    return ds.ZabbixRequest(ctx, "hostgroup.get", params)
}

func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, tsdbReq *datasource.DatasourceRequest, items zabbix.Items, jsonQueries []*simplejson.Json, useTrend bool) ([]*datasource.TimeSeries, error) {
    valueType := ds.getTrendValueType(jsonQueries)
    consolidateBy := ds.getConsolidateBy(jsonQueries)
func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, query *QueryModel, items zabbix.Items) (*data.Frame, error) {
    valueType := ds.getTrendValueType(query)
    consolidateBy := ds.getConsolidateBy(query)

    if consolidateBy == "" {
        consolidateBy = valueType
    }

    history, err := ds.getHistotyOrTrend(ctx, tsdbReq, items, useTrend)
    history, err := ds.getHistotyOrTrend(ctx, query, items)
    if err != nil {
        return nil, err
    }

    return convertHistory(history, items)
    ds.logger.Debug("Got history", "len", len(history), "items", len(items))
    return convertHistory(history, items), nil
}

func (ds *ZabbixDatasourceInstance) getTrendValueType(jsonQueries []*simplejson.Json) string {
    var trendFunctions []string
    var trendValueFunc string
func (ds *ZabbixDatasourceInstance) getTrendValueType(query *QueryModel) string {
    trendValue := "avg"

    // TODO: loop over populated categories
    for _, j := range new(FunctionCategories).Trends {
        trendFunctions = append(trendFunctions, j["name"].(string))
    }
    for _, k := range jsonQueries[0].Get("functions").MustArray() {
        for _, j := range trendFunctions {
            if j == k.(map[string]interface{})["def"].(map[string]interface{})["name"] {
                trendValueFunc = j
            }
    for _, fn := range query.Functions {
        if fn.Def.Name == "trendValue" && len(fn.Params) > 0 {
            trendValue = fn.Params[0]
        }
    }

    if trendValueFunc == "" {
        trendValueFunc = "avg"
    }

    return trendValueFunc
    return trendValue
}

func (ds *ZabbixDatasourceInstance) getConsolidateBy(jsonQueries []*simplejson.Json) string {
    var consolidateBy string
func (ds *ZabbixDatasourceInstance) getConsolidateBy(query *QueryModel) string {
    consolidateBy := "avg"

    for _, k := range jsonQueries[0].Get("functions").MustArray() {
        if k.(map[string]interface{})["def"].(map[string]interface{})["name"] == "consolidateBy" {
            defParams := k.(map[string]interface{})["def"].(map[string]interface{})["params"].([]interface{})
            if len(defParams) > 0 {
                consolidateBy = defParams[0].(string)
            }
    for _, fn := range query.Functions {
        if fn.Def.Name == "consolidateBy" && len(fn.Params) > 0 {
            consolidateBy = fn.Params[0]
        }
    }
    return consolidateBy
}
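Not part of the commit: how these two helpers read a query's functions, sketched with invented values.

// Illustrative only.
query := &QueryModel{
    Functions: []QueryFunction{
        {Def: QueryFunctionDef{Name: "consolidateBy"}, Params: []string{"max"}},
        {Def: QueryFunctionDef{Name: "trendValue"}, Params: []string{"min"}},
    },
}
// ds.getConsolidateBy(query) returns "max"; ds.getTrendValueType(query) returns "min".
// With no matching function, either helper falls back to "avg".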

func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, tsdbReq *datasource.DatasourceRequest, items zabbix.Items, useTrend bool) (zabbix.History, error) {
func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, query *QueryModel, items zabbix.Items) (zabbix.History, error) {
    timeRange := query.TimeRange
    useTrend := isUseTrend(timeRange)
    allHistory := zabbix.History{}

    timeRange := tsdbReq.GetTimeRange()
    groupedItems := map[int]zabbix.Items{}

    for _, j := range items {
@@ -416,12 +402,12 @@ func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, tsdbR
    }

        params := ZabbixAPIParams{
            "output": &zabbixParamOutput{Mode: "extend"},
            "sortField": "clock",
            "sortOrder": "ASC",
            "itemIDs": itemids,
            "timeFrom": timeRange.GetFromEpochMs() / 1000,
            "timeTill": timeRange.GetToEpochMs() / 1000,
            "output": "extend",
            "sortfield": "clock",
            "sortorder": "ASC",
            "itemids": itemids,
            "time_from": timeRange.From.Unix(),
            "time_till": timeRange.To.Unix(),
        }

        var response *simplejson.Json
@@ -453,9 +439,9 @@ func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, tsdbR
    return allHistory, nil
}

func isUseTrend(timeRange *datasource.TimeRange) bool {
    fromSec := timeRange.GetFromEpochMs() / 1000
    toSec := timeRange.GetToEpochMs() / 1000
func isUseTrend(timeRange backend.TimeRange) bool {
    fromSec := timeRange.From.Unix()
    toSec := timeRange.To.Unix()
    if (fromSec < time.Now().Add(time.Hour*-7*24).Unix()) ||
        (toSec-fromSec > (4 * 24 * time.Hour).Milliseconds()) {
        return true
@@ -463,28 +449,38 @@ func isUseTrend(timeRange *datasource.TimeRange) bool {
    return false
}

func convertHistory(history zabbix.History, items zabbix.Items) ([]*datasource.TimeSeries, error) {
    seriesMap := map[string]*datasource.TimeSeries{}
func convertHistory(history zabbix.History, items zabbix.Items) *data.Frame {
    timeFileld := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
    timeFileld.Name = "time"
    frame := data.NewFrame("History", timeFileld)

    for _, item := range items {
        seriesMap[item.ID] = &datasource.TimeSeries{
            Name: fmt.Sprintf("%s %s", item.Hosts[0].Name, item.Name),
            Points: []*datasource.Point{},
        field := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
        if len(item.Hosts) > 0 {
            field.Name = fmt.Sprintf("%s: %s", item.Hosts[0].Name, item.ExpandItem())
        } else {
            field.Name = item.ExpandItem()
        }
        frame.Fields = append(frame.Fields, field)
    }

    for _, point := range history {
        seriesMap[point.ItemID].Points = append(seriesMap[point.ItemID].Points, &datasource.Point{
            Timestamp: point.Clock*1000 + int64(math.Round(float64(point.NS)/1000000)),
            Value: point.Value,
        })
        for columnIndex, field := range frame.Fields {
            if columnIndex == 0 {
                ts := time.Unix(point.Clock, point.NS)
                field.Append(ts)
            } else {
                item := items[columnIndex-1]
                if point.ItemID == item.ID {
                    field.Append(&point.Value)
                } else {
                    field.Append(nil)
                }
            }
        }
    }

    seriesList := []*datasource.TimeSeries{}
    for _, series := range seriesMap {
        seriesList = append(seriesList, series)
    }
    return seriesList, nil
    return frame
}
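Not part of the commit: a sketch of the wide frame convertHistory now builds, using the SDK's data package: a shared time field plus one nullable float64 field per item, where each history point fills its own item's column and leaves the others nil. Field names below are invented.

// Equivalent empty frame shape for two items (illustrative names only):
frame := data.NewFrame("History",
    data.NewField("time", nil, []time.Time{}),
    data.NewField("web01: CPU user time", nil, []*float64{}),
    data.NewField("web01: Memory used", nil, []*float64{}),
)
// Each history point appends one row: its timestamp goes into the time field,
// its value into the matching item's field, and nil into every other field.
_ = frame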

func parseFilter(filter string) (*regexp.Regexp, error) {

@@ -70,7 +70,7 @@ func (ds *ZabbixDatasourceInstance) ZabbixRequest(ctx context.Context, method st

    // Skip auth for methods that do not require it
    if method == "apiinfo.version" {
        return ds.ZabbixAPIRequest(ctx, method, params, ds.authToken)
        return ds.ZabbixAPIRequest(ctx, method, params, "")
    }

    for attempt := 0; attempt <= 3; attempt++ {
@@ -171,7 +171,7 @@ func (ds *ZabbixDatasourceInstance) ZabbixAPIRequest(ctx context.Context, method
    }

    requestTime := time.Now().Sub(tStart)
    ds.logger.Debug("Response from Zabbix Request", "method", method, "requestTime", requestTime)
    ds.logger.Debug("Response from Zabbix Request", "method", method, "params", params, "duration", requestTime)

    return handleAPIResult(response)
}