Data queries (alerting)
@@ -3,10 +3,8 @@ package main
 import (
     "context"
     "encoding/json"
+    "errors"
     "fmt"
-    "net/http"
-    "os"
-    "time"
 
     simplejson "github.com/bitly/go-simplejson"
     "github.com/grafana/grafana_plugin_model/go/datasource"
@@ -15,6 +13,7 @@ import (
 
     "github.com/grafana/grafana-plugin-sdk-go/backend"
     "github.com/grafana/grafana-plugin-sdk-go/backend/log"
+    "github.com/grafana/grafana-plugin-sdk-go/data"
 )
 
 // ZabbixPlugin implements the Grafana backend interface and forwards queries to the ZabbixDatasourceInstance
@@ -29,27 +28,6 @@ type ZabbixDatasource struct {
     logger logging.Logger
 }
 
-func NewDatasource(logger log.Logger, mux *http.ServeMux) *ZabbixDatasource {
-    variableName := "GFX_ZABBIX_DATA_PATH"
-    path, exist := os.LookupEnv(variableName)
-    if !exist {
-        logger.Error("could not read environment variable", variableName)
-    } else {
-        logger.Debug("environment variable for storage found", "variable", variableName, "value", path)
-    }
-
-    ds := &ZabbixDatasource{
-        logger: logger,
-        datasourceCache: NewCache(10*time.Minute, 10*time.Minute),
-    }
-
-    mux.HandleFunc("/", ds.rootHandler)
-    mux.HandleFunc("/zabbix-api", ds.zabbixAPIHandler)
-    // mux.Handle("/scenarios", getScenariosHandler(logger))
-
-    return ds
-}
-
 // CheckHealth checks if the plugin is running properly
 func (ds *ZabbixDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
     res := &backend.CheckHealthResult{}
@@ -67,11 +45,34 @@ func (ds *ZabbixDatasource) CheckHealth(ctx context.Context, req *backend.CheckH
     return res, nil
 }
 
-func (gds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
+func (ds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
     qdr := backend.NewQueryDataResponse()
 
+    zabbixDS, err := ds.GetDatasource(req.PluginContext)
+    if err != nil {
+        return nil, err
+    }
+
     for _, q := range req.Queries {
         res := backend.DataResponse{}
+        query, err := ReadQuery(q)
+        ds.logger.Debug("DS query", "query", q)
+        ds.logger.Debug("DS query parsed", "query", query)
+        if err != nil {
+            res.Error = err
+        } else if len(query.Functions) > 0 {
+            res.Error = errors.New("Zabbix queries with functions are not supported")
+        } else if query.Mode != 0 {
+            res.Error = errors.New("Non-metrics queries are not supported")
+        } else {
+            frame, err := zabbixDS.queryNumericItems(ctx, &query)
+            ds.logger.Debug("DS got frame", "frame", frame)
+            if err != nil {
+                res.Error = err
+            } else {
+                res.Frames = []*data.Frame{frame}
+            }
+        }
         qdr.Responses[q.RefID] = res
     }
 
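For orientation, a rough sketch of what a caller of the rewritten QueryData sees for a query with RefID "A" (illustration only; ds, ctx and req are assumed to already exist):

    resp, err := ds.QueryData(ctx, req)
    if err != nil {
        // the whole request failed, e.g. GetDatasource could not build an instance
    }
    r := resp.Responses["A"]
    if r.Error != nil {
        // the query was rejected (functions / non-metrics mode) or execution failed
    } else {
        frame := r.Frames[0] // the single frame built by queryNumericItems
        _ = frame
    }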
@@ -120,8 +121,9 @@ func (ds *ZabbixDatasource) NewZabbixDatasource(dsInfo *backend.DataSourceInstan
 // }
 
 // GetDatasource Returns cached datasource or creates new one
-func (ds *ZabbixDatasource) GetDatasource(orgID int64, dsInfo *backend.DataSourceInstanceSettings) (*ZabbixDatasourceInstance, error) {
-    dsInfoHash := HashDatasourceInfo(dsInfo)
+func (ds *ZabbixDatasource) GetDatasource(pluginContext backend.PluginContext) (*ZabbixDatasourceInstance, error) {
+    dsSettings := pluginContext.DataSourceInstanceSettings
+    dsInfoHash := HashDatasourceInfo(dsSettings)
 
     if cachedData, ok := ds.datasourceCache.Get(dsInfoHash); ok {
         if cachedDS, ok := cachedData.(*ZabbixDatasourceInstance); ok {
@@ -129,9 +131,9 @@ func (ds *ZabbixDatasource) GetDatasource(orgID int64, dsInfo *backend.DataSourc
         }
     }
 
-    ds.logger.Debug(fmt.Sprintf("Datasource cache miss (Org %d Id %d '%s' %s)", orgID, dsInfo.ID, dsInfo.Name, dsInfoHash))
+    ds.logger.Debug(fmt.Sprintf("Datasource cache miss (Org %d Id %d '%s' %s)", pluginContext.OrgID, dsSettings.ID, dsSettings.Name, dsInfoHash))
 
-    dsInstance, err := ds.NewZabbixDatasource(dsInfo)
+    dsInstance, err := ds.NewZabbixDatasource(pluginContext.DataSourceInstanceSettings)
     if err != nil {
         return nil, err
     }
@@ -3,8 +3,80 @@ package main
 import (
     "encoding/json"
     "fmt"
+
+    "github.com/grafana/grafana-plugin-sdk-go/backend"
 )
 
+type ZabbixAPIResourceRequest struct {
+    DatasourceId int64 `json:"datasourceId"`
+    Method string `json:"method"`
+    Params map[string]interface{} `json:"params,omitempty"`
+}
+
+type ZabbixAPIRequest struct {
+    Method string `json:"method"`
+    Params map[string]interface{} `json:"params,omitempty"`
+}
+
+type ZabbixAPIResourceResponse struct {
+    Result interface{} `json:"result,omitempty"`
+}
+
+func (r *ZabbixAPIRequest) String() string {
+    jsonRequest, _ := json.Marshal(r.Params)
+    return r.Method + string(jsonRequest)
+}
+
+// QueryModel model
+type QueryModel struct {
+    Mode int64 `json:"mode"`
+    Group QueryFilter `json:"group"`
+    Host QueryFilter `json:"host"`
+    Application QueryFilter `json:"application"`
+    Item QueryFilter `json:"item"`
+    Functions []QueryFunction `json:"functions,omitempty"`
+    Options QueryOptions `json:"options"`
+
+    // Direct from the gRPC interfaces
+    TimeRange backend.TimeRange `json:"-"`
+}
+
+// QueryOptions model
+type QueryFilter struct {
+    Filter string `json:"filter"`
+}
+
+// QueryOptions model
+type QueryOptions struct {
+    ShowDisabledItems bool `json:"showDisabledItems"`
+}
+
+// QueryOptions model
+type QueryFunction struct {
+    Def QueryFunctionDef `json:"def"`
+    Params []string `json:"params"`
+    Text string `json:"text"`
+}
+
+// QueryOptions model
+type QueryFunctionDef struct {
+    Name string `json:"name"`
+    Category string `json:"category"`
+}
+
+// ReadQuery will read and validate Settings from the DataSourceConfg
+func ReadQuery(query backend.DataQuery) (QueryModel, error) {
+    model := QueryModel{}
+    if err := json.Unmarshal(query.JSON, &model); err != nil {
+        return model, fmt.Errorf("could not read query: %w", err)
+    }
+
+    model.TimeRange = query.TimeRange
+    return model, nil
+}
+
+// Old models
+
 type connectionTestResponse struct {
     ZabbixVersion string `json:"zabbixVersion"`
     DbConnectorStatus *dbConnectionStatus `json:"dbConnectorStatus"`
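For reference, a hypothetical query payload that ReadQuery above would accept; the field names follow the QueryModel JSON tags, the values are invented:

    raw := []byte(`{
        "mode": 0,
        "group": {"filter": "Linux servers"},
        "host": {"filter": "/^web[0-9]+$/"},
        "application": {"filter": ""},
        "item": {"filter": "CPU user time"},
        "options": {"showDisabledItems": false}
    }`)
    q := backend.DataQuery{RefID: "A", JSON: raw}
    model, err := ReadQuery(q)
    // err == nil, model.Mode == 0 (metrics), model.Group.Filter == "Linux servers"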
@@ -103,23 +175,3 @@ type ZabbixAPIParamsLegacy struct {
     TimeFrom int64 `json:"time_from,omitempty"`
     TimeTill int64 `json:"time_till,omitempty"`
 }
-
-type ZabbixAPIResourceRequest struct {
-    DatasourceId int64 `json:"datasourceId"`
-    Method string `json:"method"`
-    Params map[string]interface{} `json:"params,omitempty"`
-}
-
-type ZabbixAPIRequest struct {
-    Method string `json:"method"`
-    Params map[string]interface{} `json:"params,omitempty"`
-}
-
-type ZabbixAPIResourceResponse struct {
-    Result interface{} `json:"result,omitempty"`
-}
-
-func (r *ZabbixAPIRequest) String() string {
-    jsonRequest, _ := json.Marshal(r.Params)
-    return r.Method + string(jsonRequest)
-}
@@ -2,6 +2,8 @@ package main
 
 import (
     "net/http"
+    "os"
+    "time"
 
     "github.com/grafana/grafana-plugin-sdk-go/backend"
     "github.com/grafana/grafana-plugin-sdk-go/backend/log"
@@ -15,7 +17,7 @@ func main() {
 
     pluginLogger := log.New()
     mux := http.NewServeMux()
-    ds := NewDatasource(pluginLogger, mux)
+    ds := Init(pluginLogger, mux)
     httpResourceHandler := httpadapter.New(mux)
 
     pluginLogger.Debug("Starting Zabbix backend datasource")
@@ -28,22 +30,25 @@ func main() {
     if err != nil {
         pluginLogger.Error(err.Error())
     }
-    // plugin.Serve(&plugin.ServeConfig{
-    // HandshakeConfig: plugin.HandshakeConfig{
-    // ProtocolVersion: 1,
-    // MagicCookieKey: "grafana_plugin_type",
-    // MagicCookieValue: "datasource",
-    // },
-    // Plugins: map[string]plugin.Plugin{
-    // "zabbix-backend-datasource": &datasource.DatasourcePluginImpl{Plugin: &ZabbixPlugin{
-    // datasourceCache: NewCache(10*time.Minute, 10*time.Minute),
-    // logger: pluginLogger,
-    // }},
-    // },
-
-    // // A non-nil value here enables gRPC serving for this plugin...
-    // GRPCServer: plugin.DefaultGRPCServer,
-    // })
+}
+
+func Init(logger log.Logger, mux *http.ServeMux) *ZabbixDatasource {
+    variableName := "GFX_ZABBIX_DATA_PATH"
+    path, exist := os.LookupEnv(variableName)
+    if !exist {
+        logger.Error("could not read environment variable", variableName)
+    } else {
+        logger.Debug("environment variable for storage found", "variable", variableName, "value", path)
+    }
+
+    ds := &ZabbixDatasource{
+        logger: logger,
+        datasourceCache: NewCache(10*time.Minute, 10*time.Minute),
+    }
+
+    mux.HandleFunc("/", ds.rootHandler)
+    mux.HandleFunc("/zabbix-api", ds.zabbixAPIHandler)
+    // mux.Handle("/scenarios", getScenariosHandler(logger))
+
+    return ds
 }
@@ -39,7 +39,7 @@ func (ds *ZabbixDatasource) zabbixAPIHandler(rw http.ResponseWriter, req *http.R
 
     pluginCxt := httpadapter.PluginConfigFromContext(req.Context())
 
-    dsInstance, err := ds.GetDatasource(pluginCxt.OrgID, pluginCxt.DataSourceInstanceSettings)
+    dsInstance, err := ds.GetDatasource(pluginCxt)
     ds.logger.Debug("Data source found", "ds", dsInstance.dsInfo.Name)
 
     ds.logger.Debug("Invoke Zabbix API call", "ds", pluginCxt.DataSourceInstanceSettings.Name, "method", reqData.Method)
@@ -1,6 +1,12 @@
 package zabbix
 
+import (
+    "fmt"
+    "strings"
+)
+
 type Items []Item
 
 type Item struct {
     ID string `json:"itemid,omitempty"`
     Key string `json:"key_,omitempty"`
@@ -11,12 +17,65 @@ type Item struct {
     Status string `json:"status,omitempty"`
     State string `json:"state,omitempty"`
 }
 
+func (item *Item) ExpandItem() string {
+    name := item.Name
+    key := item.Key
+
+    if strings.Index(key, "[") == -1 {
+        return name
+    }
+
+    keyRunes := []rune(item.Key)
+    keyParamsStr := string(keyRunes[strings.Index(key, "[")+1 : strings.LastIndex(key, "]")])
+    keyParams := splitKeyParams(keyParamsStr)
+
+    for i := len(keyParams); i >= 1; i-- {
+        name = strings.ReplaceAll(name, fmt.Sprintf("$%v", i), keyParams[i-1])
+    }
+
+    return name
+}
+
+func splitKeyParams(paramStr string) []string {
+    paramRunes := []rune(paramStr)
+    params := []string{}
+    quoted := false
+    inArray := false
+    splitSymbol := ","
+    param := ""
+
+    for _, r := range paramRunes {
+        symbol := string(r)
+        if symbol == `"` && inArray {
+            param += symbol
+        } else if symbol == `"` && quoted {
+            quoted = false
+        } else if symbol == `"` && !quoted {
+            quoted = true
+        } else if symbol == "[" && !quoted {
+            inArray = true
+        } else if symbol == "]" && !quoted {
+            inArray = false
+        } else if symbol == splitSymbol && !quoted && !inArray {
+            params = append(params, param)
+            param = ""
+        } else {
+            param += symbol
+        }
+    }
+
+    params = append(params, param)
+    return params
+}
+
 type ItemHost struct {
     ID string `json:"hostid,omitempty"`
     Name string `json:"name,omitempty"`
 }
 
 type Trend []TrendPoint
 
 type TrendPoint struct {
     ItemID string `json:"itemid,omitempty"`
     Clock int64 `json:"clock,omitempty,string"`
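A quick illustration of the key-parameter expansion these two helpers implement (a sketch; Item's Name field sits outside the context lines shown above but is used by ExpandItem):

    item := Item{
        Name: "CPU $2 time",
        Key:  "system.cpu.util[,user,avg1]",
    }
    // splitKeyParams(",user,avg1") yields ["", "user", "avg1"], so "$2" becomes "user".
    expanded := item.ExpandItem() // "CPU user time"
    _ = expanded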
@@ -27,6 +86,7 @@ type TrendPoint struct {
 }
 
 type History []HistoryPoint
 
 type HistoryPoint struct {
     ItemID string `json:"itemid,omitempty"`
     Clock int64 `json:"clock,omitempty,string"`
@@ -4,12 +4,13 @@ import (
     "encoding/json"
     "errors"
     "fmt"
-    "math"
     "regexp"
     "time"
 
     "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
     simplejson "github.com/bitly/go-simplejson"
+    "github.com/grafana/grafana-plugin-sdk-go/backend"
+    "github.com/grafana/grafana-plugin-sdk-go/data"
     "github.com/grafana/grafana_plugin_model/go/datasource"
     "golang.org/x/net/context"
 )
@@ -99,28 +100,13 @@ func (ds *ZabbixDatasourceInstance) TestConnection(ctx context.Context, tsdbReq
     return BuildResponse(testResponse)
 }
 
-func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, tsdbReq *datasource.DatasourceRequest) (*datasource.DatasourceResponse, error) {
+func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, query *QueryModel) (*data.Frame, error) {
     tStart := time.Now()
 
-    jsonQueries := make([]*simplejson.Json, 0)
-    for _, query := range tsdbReq.Queries {
-        json, err := simplejson.NewJson([]byte(query.ModelJson))
-        if err != nil {
-            return nil, err
-        }
-
-        jsonQueries = append(jsonQueries, json)
-    }
-
-    if len(jsonQueries) == 0 {
-        return nil, errors.New("At least one query should be provided")
-    }
-
-    firstQuery := jsonQueries[0]
-
-    groupFilter := firstQuery.GetPath("group", "filter").MustString()
-    hostFilter := firstQuery.GetPath("host", "filter").MustString()
-    appFilter := firstQuery.GetPath("application", "filter").MustString()
-    itemFilter := firstQuery.GetPath("item", "filter").MustString()
+    groupFilter := query.Group.Filter
+    hostFilter := query.Host.Filter
+    appFilter := query.Application.Filter
+    itemFilter := query.Item.Filter
 
     ds.logger.Debug("queryNumericItems",
         "func", "ds.getItems",
@@ -128,25 +114,25 @@ func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, tsdbR
         "hostFilter", hostFilter,
         "appFilter", appFilter,
         "itemFilter", itemFilter)
-    items, err := ds.getItems(ctx, tsdbReq.GetDatasource(), groupFilter, hostFilter, appFilter, itemFilter, "num")
+    items, err := ds.getItems(ctx, groupFilter, hostFilter, appFilter, itemFilter, "num")
     if err != nil {
         return nil, err
     }
     ds.logger.Debug("queryNumericItems", "finished", "ds.getItems", "timeElapsed", time.Now().Sub(tStart))
 
-    metrics, err := ds.queryNumericDataForItems(ctx, tsdbReq, items, jsonQueries, isUseTrend(tsdbReq.GetTimeRange()))
+    frames, err := ds.queryNumericDataForItems(ctx, query, items)
     if err != nil {
         return nil, err
     }
     ds.logger.Debug("queryNumericItems", "finished", "queryNumericDataForItems", "timeElapsed", time.Now().Sub(tStart))
 
-    return BuildMetricsResponse(metrics)
+    return frames, nil
 }
 
-func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) (zabbix.Items, error) {
+func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) (zabbix.Items, error) {
     tStart := time.Now()
 
-    hosts, err := ds.getHosts(ctx, dsInfo, groupFilter, hostFilter)
+    hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
     if err != nil {
         return nil, err
     }
@@ -156,7 +142,7 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
     }
     ds.logger.Debug("getItems", "finished", "getHosts", "timeElapsed", time.Now().Sub(tStart))
 
-    apps, err := ds.getApps(ctx, dsInfo, groupFilter, hostFilter, appFilter)
+    apps, err := ds.getApps(ctx, groupFilter, hostFilter, appFilter)
     if err != nil {
         return nil, err
     }
@@ -169,10 +155,10 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
     var allItems *simplejson.Json
     if len(hostids) > 0 {
         ds.logger.Debug("getAllItems", "with", "hostFilter")
-        allItems, err = ds.getAllItems(ctx, dsInfo, hostids, nil, itemType)
+        allItems, err = ds.getAllItems(ctx, hostids, nil, itemType)
     } else if len(appids) > 0 {
         ds.logger.Debug("getAllItems", "with", "appFilter")
-        allItems, err = ds.getAllItems(ctx, dsInfo, nil, appids, itemType)
+        allItems, err = ds.getAllItems(ctx, nil, appids, itemType)
     }
     ds.logger.Debug("getItems", "finished", "getAllItems", "timeElapsed", time.Now().Sub(tStart))
 
@@ -199,12 +185,13 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
 
     filteredItems := zabbix.Items{}
     for _, item := range items {
+        itemName := item.ExpandItem()
         if item.Status == "0" {
             if re != nil {
-                if re.MatchString(item.Name) {
+                if re.MatchString(itemName) {
                     filteredItems = append(filteredItems, item)
                 }
-            } else if item.Name == itemFilter {
+            } else if itemName == itemFilter {
                 filteredItems = append(filteredItems, item)
             }
         }
@@ -214,8 +201,8 @@ func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, dsInfo *dataso
     return filteredItems, nil
 }
 
-func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string, hostFilter string, appFilter string) ([]map[string]interface{}, error) {
-    hosts, err := ds.getHosts(ctx, dsInfo, groupFilter, hostFilter)
+func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, groupFilter string, hostFilter string, appFilter string) ([]map[string]interface{}, error) {
+    hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
     if err != nil {
         return nil, err
     }
@@ -223,7 +210,7 @@ func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, dsInfo *datasou
     for _, k := range hosts {
         hostids = append(hostids, k["hostid"].(string))
     }
-    allApps, err := ds.getAllApps(ctx, dsInfo, hostids)
+    allApps, err := ds.getAllApps(ctx, hostids)
     if err != nil {
         return nil, err
     }
@@ -248,8 +235,8 @@ func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, dsInfo *datasou
     return apps, nil
 }
 
-func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string, hostFilter string) ([]map[string]interface{}, error) {
-    groups, err := ds.getGroups(ctx, dsInfo, groupFilter)
+func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, groupFilter string, hostFilter string) ([]map[string]interface{}, error) {
+    groups, err := ds.getGroups(ctx, groupFilter)
     if err != nil {
         return nil, err
     }
@@ -257,7 +244,7 @@ func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, dsInfo *dataso
     for _, k := range groups {
         groupids = append(groupids, k["groupid"].(string))
     }
-    allHosts, err := ds.getAllHosts(ctx, dsInfo, groupids)
+    allHosts, err := ds.getAllHosts(ctx, groupids)
     if err != nil {
         return nil, err
     }
@@ -283,8 +270,8 @@ func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, dsInfo *dataso
     return hosts, nil
 }
 
-func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupFilter string) ([]map[string]interface{}, error) {
-    allGroups, err := ds.getAllGroups(ctx, dsInfo)
+func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, groupFilter string) ([]map[string]interface{}, error) {
+    allGroups, err := ds.getAllGroups(ctx)
     if err != nil {
         return nil, err
     }
@@ -307,15 +294,15 @@ func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, dsInfo *datas
     return groups, nil
 }
 
-func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, dsInfo *datasource.DatasourceInfo, hostids []string, appids []string, itemtype string) (*simplejson.Json, error) {
+func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, hostids []string, appids []string, itemtype string) (*simplejson.Json, error) {
     params := ZabbixAPIParams{
-        "output": &zabbixParamOutput{Fields: []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state"}},
-        "sortField": "name",
-        "webItems": true,
+        "output": []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state"},
+        "sortfield": "name",
+        "webitems": true,
         "filter": map[string]interface{}{},
         "selectHosts": []string{"hostid", "name"},
-        "hostIDs": hostids,
-        "appIDs": appids,
+        "hostids": hostids,
+        "applicationids": appids,
     }
 
     filter := params["filter"].(map[string]interface{})
@@ -328,81 +315,80 @@ func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, dsInfo *dat
     return ds.ZabbixRequest(ctx, "item.get", params)
 }
 
-func (ds *ZabbixDatasourceInstance) getAllApps(ctx context.Context, dsInfo *datasource.DatasourceInfo, hostids []string) (*simplejson.Json, error) {
-    params := ZabbixAPIParams{"output": &zabbixParamOutput{Mode: "extend"}, "hostIDs": hostids}
+func (ds *ZabbixDatasourceInstance) getAllApps(ctx context.Context, hostids []string) (*simplejson.Json, error) {
+    params := ZabbixAPIParams{
+        "output": "extend",
+        "hostids": hostids,
+    }
+
     return ds.ZabbixRequest(ctx, "application.get", params)
 }
 
-func (ds *ZabbixDatasourceInstance) getAllHosts(ctx context.Context, dsInfo *datasource.DatasourceInfo, groupids []string) (*simplejson.Json, error) {
-    params := ZabbixAPIParams{"output": &zabbixParamOutput{Fields: []string{"name", "host"}}, "sortField": "name", "groupIDs": groupids}
+func (ds *ZabbixDatasourceInstance) getAllHosts(ctx context.Context, groupids []string) (*simplejson.Json, error) {
+    params := ZabbixAPIParams{
+        "output": []string{"name", "host"},
+        "sortfield": "name",
+        "groupids": groupids,
+    }
+
     return ds.ZabbixRequest(ctx, "host.get", params)
 }
 
-func (ds *ZabbixDatasourceInstance) getAllGroups(ctx context.Context, dsInfo *datasource.DatasourceInfo) (*simplejson.Json, error) {
-    params := ZabbixAPIParams{"output": &zabbixParamOutput{Fields: []string{"name"}}, "sortField": "name", "realHosts": true}
+func (ds *ZabbixDatasourceInstance) getAllGroups(ctx context.Context) (*simplejson.Json, error) {
+    params := ZabbixAPIParams{
+        "output": []string{"name"},
+        "sortfield": "name",
+        "real_hosts": true,
+    }
+
     return ds.ZabbixRequest(ctx, "hostgroup.get", params)
 }
 
-func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, tsdbReq *datasource.DatasourceRequest, items zabbix.Items, jsonQueries []*simplejson.Json, useTrend bool) ([]*datasource.TimeSeries, error) {
-    valueType := ds.getTrendValueType(jsonQueries)
-    consolidateBy := ds.getConsolidateBy(jsonQueries)
+func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, query *QueryModel, items zabbix.Items) (*data.Frame, error) {
+    valueType := ds.getTrendValueType(query)
+    consolidateBy := ds.getConsolidateBy(query)
 
     if consolidateBy == "" {
         consolidateBy = valueType
     }
 
-    history, err := ds.getHistotyOrTrend(ctx, tsdbReq, items, useTrend)
+    history, err := ds.getHistotyOrTrend(ctx, query, items)
     if err != nil {
         return nil, err
     }
 
-    return convertHistory(history, items)
+    ds.logger.Debug("Got history", "len", len(history), "items", len(items))
+    return convertHistory(history, items), nil
 }
 
-func (ds *ZabbixDatasourceInstance) getTrendValueType(jsonQueries []*simplejson.Json) string {
-    var trendFunctions []string
-    var trendValueFunc string
-
-    // TODO: loop over populated categories
-    for _, j := range new(FunctionCategories).Trends {
-        trendFunctions = append(trendFunctions, j["name"].(string))
-    }
-    for _, k := range jsonQueries[0].Get("functions").MustArray() {
-        for _, j := range trendFunctions {
-            if j == k.(map[string]interface{})["def"].(map[string]interface{})["name"] {
-                trendValueFunc = j
-            }
-        }
-    }
-
-    if trendValueFunc == "" {
-        trendValueFunc = "avg"
-    }
-
-    return trendValueFunc
-}
-
-func (ds *ZabbixDatasourceInstance) getConsolidateBy(jsonQueries []*simplejson.Json) string {
-    var consolidateBy string
-
-    for _, k := range jsonQueries[0].Get("functions").MustArray() {
-        if k.(map[string]interface{})["def"].(map[string]interface{})["name"] == "consolidateBy" {
-            defParams := k.(map[string]interface{})["def"].(map[string]interface{})["params"].([]interface{})
-            if len(defParams) > 0 {
-                consolidateBy = defParams[0].(string)
-            }
-        }
-    }
+func (ds *ZabbixDatasourceInstance) getTrendValueType(query *QueryModel) string {
+    trendValue := "avg"
+
+    for _, fn := range query.Functions {
+        if fn.Def.Name == "trendValue" && len(fn.Params) > 0 {
+            trendValue = fn.Params[0]
+        }
+    }
+
+    return trendValue
+}
+
+func (ds *ZabbixDatasourceInstance) getConsolidateBy(query *QueryModel) string {
+    consolidateBy := "avg"
+
+    for _, fn := range query.Functions {
+        if fn.Def.Name == "consolidateBy" && len(fn.Params) > 0 {
+            consolidateBy = fn.Params[0]
+        }
+    }
     return consolidateBy
 }
 
-func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, tsdbReq *datasource.DatasourceRequest, items zabbix.Items, useTrend bool) (zabbix.History, error) {
+func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, query *QueryModel, items zabbix.Items) (zabbix.History, error) {
+    timeRange := query.TimeRange
+    useTrend := isUseTrend(timeRange)
     allHistory := zabbix.History{}
 
-    timeRange := tsdbReq.GetTimeRange()
     groupedItems := map[int]zabbix.Items{}
 
     for _, j := range items {
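For illustration, the kind of functions entry the rewritten getTrendValueType and getConsolidateBy look for (hypothetical values; both fall back to "avg" when no matching function is present):

    query := QueryModel{
        Functions: []QueryFunction{
            {Def: QueryFunctionDef{Name: "consolidateBy"}, Params: []string{"max"}},
            {Def: QueryFunctionDef{Name: "trendValue"}, Params: []string{"min"}},
        },
    }
    // ds.getConsolidateBy(&query) -> "max"
    // ds.getTrendValueType(&query) -> "min"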
@@ -416,12 +402,12 @@ func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, tsdbR
     }
 
     params := ZabbixAPIParams{
-        "output": &zabbixParamOutput{Mode: "extend"},
-        "sortField": "clock",
-        "sortOrder": "ASC",
-        "itemIDs": itemids,
-        "timeFrom": timeRange.GetFromEpochMs() / 1000,
-        "timeTill": timeRange.GetToEpochMs() / 1000,
+        "output": "extend",
+        "sortfield": "clock",
+        "sortorder": "ASC",
+        "itemids": itemids,
+        "time_from": timeRange.From.Unix(),
+        "time_till": timeRange.To.Unix(),
     }
 
     var response *simplejson.Json
@@ -453,9 +439,9 @@ func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, tsdbR
     return allHistory, nil
 }
 
-func isUseTrend(timeRange *datasource.TimeRange) bool {
-    fromSec := timeRange.GetFromEpochMs() / 1000
-    toSec := timeRange.GetToEpochMs() / 1000
+func isUseTrend(timeRange backend.TimeRange) bool {
+    fromSec := timeRange.From.Unix()
+    toSec := timeRange.To.Unix()
     if (fromSec < time.Now().Add(time.Hour*-7*24).Unix()) ||
         (toSec-fromSec > (4 * 24 * time.Hour).Milliseconds()) {
         return true
@@ -463,28 +449,38 @@ func isUseTrend(timeRange *datasource.TimeRange) bool {
     return false
 }
 
-func convertHistory(history zabbix.History, items zabbix.Items) ([]*datasource.TimeSeries, error) {
-    seriesMap := map[string]*datasource.TimeSeries{}
+func convertHistory(history zabbix.History, items zabbix.Items) *data.Frame {
+    timeFileld := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
+    timeFileld.Name = "time"
+    frame := data.NewFrame("History", timeFileld)
 
     for _, item := range items {
-        seriesMap[item.ID] = &datasource.TimeSeries{
-            Name: fmt.Sprintf("%s %s", item.Hosts[0].Name, item.Name),
-            Points: []*datasource.Point{},
+        field := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
+        if len(item.Hosts) > 0 {
+            field.Name = fmt.Sprintf("%s: %s", item.Hosts[0].Name, item.ExpandItem())
+        } else {
+            field.Name = item.ExpandItem()
         }
+        frame.Fields = append(frame.Fields, field)
     }
 
     for _, point := range history {
-        seriesMap[point.ItemID].Points = append(seriesMap[point.ItemID].Points, &datasource.Point{
-            Timestamp: point.Clock*1000 + int64(math.Round(float64(point.NS)/1000000)),
-            Value: point.Value,
-        })
+        for columnIndex, field := range frame.Fields {
+            if columnIndex == 0 {
+                ts := time.Unix(point.Clock, point.NS)
+                field.Append(ts)
+            } else {
+                item := items[columnIndex-1]
+                if point.ItemID == item.ID {
+                    field.Append(&point.Value)
+                } else {
+                    field.Append(nil)
+                }
+            }
+        }
     }
 
-    seriesList := []*datasource.TimeSeries{}
-    for _, series := range seriesMap {
-        seriesList = append(seriesList, series)
-    }
-    return seriesList, nil
+    return frame
 }
 
 func parseFilter(filter string) (*regexp.Regexp, error) {
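A short sketch of the wide frame convertHistory now returns (illustration only): field 0 is the time column, field i for i >= 1 holds the values of items[i-1] as nullable floats, left nil where a history row belongs to a different item:

    frame := convertHistory(history, items)
    timeField := frame.Fields[0] // data.FieldTypeTime
    for i := range items {
        valueField := frame.Fields[i+1] // data.FieldTypeNullableFloat64
        _ = valueField
    }
    _ = timeField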
@@ -70,7 +70,7 @@ func (ds *ZabbixDatasourceInstance) ZabbixRequest(ctx context.Context, method st
 
     // Skip auth for methods that are not required it
     if method == "apiinfo.version" {
-        return ds.ZabbixAPIRequest(ctx, method, params, ds.authToken)
+        return ds.ZabbixAPIRequest(ctx, method, params, "")
     }
 
     for attempt := 0; attempt <= 3; attempt++ {
@@ -171,7 +171,7 @@ func (ds *ZabbixDatasourceInstance) ZabbixAPIRequest(ctx context.Context, method
     }
 
     requestTime := time.Now().Sub(tStart)
-    ds.logger.Debug("Response from Zabbix Request", "method", method, "requestTime", requestTime)
+    ds.logger.Debug("Response from Zabbix Request", "method", method, "params", params, "duration", requestTime)
 
     return handleAPIResult(response)
 }