reorganize packages and add tests for zabbix datasource instance methods
188
pkg/datasource/datasource.go
Normal file
@@ -0,0 +1,188 @@
package datasource

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
	"github.com/alexanderzobnin/grafana-zabbix/pkg/gtime"
	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
	"github.com/grafana/grafana-plugin-sdk-go/data"
)

type ZabbixDatasource struct {
	datasourceCache *cache.Cache
	logger          log.Logger
}

// ZabbixDatasourceInstance stores state about a specific datasource
// and provides methods to make requests to the Zabbix API
type ZabbixDatasourceInstance struct {
	zabbixAPI  *zabbixapi.ZabbixAPI
	dsInfo     *backend.DataSourceInstanceSettings
	Settings   *ZabbixDatasourceSettings
	queryCache *cache.Cache
	logger     log.Logger
}

func NewZabbixDatasource() *ZabbixDatasource {
	return &ZabbixDatasource{
		datasourceCache: cache.NewCache(10*time.Minute, 10*time.Minute),
		logger:          log.New(),
	}
}

// NewZabbixDatasourceInstance returns an initialized zabbix datasource instance
func NewZabbixDatasourceInstance(dsInfo *backend.DataSourceInstanceSettings) (*ZabbixDatasourceInstance, error) {
	zabbixAPI, err := zabbixapi.New(dsInfo.URL)
	if err != nil {
		return nil, err
	}

	zabbixSettings, err := readZabbixSettings(dsInfo)
	if err != nil {
		return nil, err
	}

	return &ZabbixDatasourceInstance{
		dsInfo:     dsInfo,
		zabbixAPI:  zabbixAPI,
		Settings:   zabbixSettings,
		queryCache: cache.NewCache(zabbixSettings.CacheTTL, 10*time.Minute),
		logger:     log.New(),
	}, nil
}

// CheckHealth checks if the plugin is running properly
func (ds *ZabbixDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
	res := &backend.CheckHealthResult{}

	dsInstance, err := ds.GetDatasource(req.PluginContext)
	if err != nil {
		res.Status = backend.HealthStatusError
		res.Message = "Error getting datasource instance"
		ds.logger.Error("Error getting datasource instance", "err", err)
		return res, nil
	}

	message, err := dsInstance.TestConnection(ctx)
	if err != nil {
		res.Status = backend.HealthStatusError
		res.Message = err.Error()
		ds.logger.Error("Error connecting to Zabbix", "err", err)
		return res, nil
	}

	res.Status = backend.HealthStatusOk
	res.Message = message
	return res, nil
}

func (ds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	qdr := backend.NewQueryDataResponse()

	zabbixDS, err := ds.GetDatasource(req.PluginContext)
	if err != nil {
		return nil, err
	}

	for _, q := range req.Queries {
		res := backend.DataResponse{}
		query, err := ReadQuery(q)
		ds.logger.Debug("DS query", "query", q)
		if err != nil {
			res.Error = err
		} else if len(query.Functions) > 0 {
			res.Error = errors.New("Zabbix queries with functions are not supported")
		} else if query.Mode != 0 {
			res.Error = errors.New("Non-metrics queries are not supported")
		} else {
			frame, err := zabbixDS.queryNumericItems(ctx, &query)
			if err != nil {
				res.Error = err
			} else {
				res.Frames = []*data.Frame{frame}
			}
		}
		qdr.Responses[q.RefID] = res
	}

	return qdr, nil
}

// GetDatasource returns a cached datasource instance or creates a new one
func (ds *ZabbixDatasource) GetDatasource(pluginContext backend.PluginContext) (*ZabbixDatasourceInstance, error) {
	dsSettings := pluginContext.DataSourceInstanceSettings
	dsKey := fmt.Sprintf("%d-%d", pluginContext.OrgID, dsSettings.ID)
	// Get hash to check if settings changed
	dsInfoHash := cache.HashDatasourceInfo(dsSettings)

	if cachedData, ok := ds.datasourceCache.Get(dsKey); ok {
		if cachedDS, ok := cachedData.(*ZabbixDatasourceInstance); ok {
			cachedDSHash := cache.HashDatasourceInfo(cachedDS.dsInfo)
			if cachedDSHash == dsInfoHash {
				return cachedDS, nil
			}
			ds.logger.Debug("Data source settings changed", "org", pluginContext.OrgID, "id", dsSettings.ID, "name", dsSettings.Name)
		}
	}

	ds.logger.Debug("Initializing data source", "org", pluginContext.OrgID, "id", dsSettings.ID, "name", dsSettings.Name)
	dsInstance, err := NewZabbixDatasourceInstance(pluginContext.DataSourceInstanceSettings)
	if err != nil {
		ds.logger.Error("Error initializing datasource", "error", err)
		return nil, err
	}

	ds.datasourceCache.Set(dsKey, dsInstance)
	return dsInstance, nil
}

func readZabbixSettings(dsInstanceSettings *backend.DataSourceInstanceSettings) (*ZabbixDatasourceSettings, error) {
	zabbixSettingsDTO := &ZabbixDatasourceSettingsDTO{}

	err := json.Unmarshal(dsInstanceSettings.JSONData, &zabbixSettingsDTO)
	if err != nil {
		return nil, err
	}

	if zabbixSettingsDTO.TrendsFrom == "" {
		zabbixSettingsDTO.TrendsFrom = "7d"
	}
	if zabbixSettingsDTO.TrendsRange == "" {
		zabbixSettingsDTO.TrendsRange = "4d"
	}
	if zabbixSettingsDTO.CacheTTL == "" {
		zabbixSettingsDTO.CacheTTL = "1h"
	}

	trendsFrom, err := gtime.ParseInterval(zabbixSettingsDTO.TrendsFrom)
	if err != nil {
		return nil, err
	}

	trendsRange, err := gtime.ParseInterval(zabbixSettingsDTO.TrendsRange)
	if err != nil {
		return nil, err
	}

	cacheTTL, err := gtime.ParseInterval(zabbixSettingsDTO.CacheTTL)
	if err != nil {
		return nil, err
	}

	zabbixSettings := &ZabbixDatasourceSettings{
		Trends:      zabbixSettingsDTO.Trends,
		TrendsFrom:  trendsFrom,
		TrendsRange: trendsRange,
		CacheTTL:    cacheTTL,
	}

	return zabbixSettings, nil
}
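
Editor's note: to make the default handling in readZabbixSettings concrete, here is a small sketch, assumed to live in package datasource and assuming gtime.ParseInterval resolves "7d", "4d" and "1h" to 168h, 96h and 1h respectively. It is not part of the commit.

	// Editor's sketch: with empty jsonData the fallbacks "7d", "4d" and "1h" apply.
	func sketchSettingsDefaults() (*ZabbixDatasourceSettings, error) {
		return readZabbixSettings(&backend.DataSourceInstanceSettings{
			JSONData: []byte(`{}`),
		})
		// Expected under the assumptions above:
		//   Trends: false, TrendsFrom: 168h, TrendsRange: 96h, CacheTTL: 1h
	}
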
86
pkg/datasource/datasource_test.go
Normal file
@@ -0,0 +1,86 @@
package datasource

import (
	"fmt"
	"testing"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
	"gotest.tools/assert"
)

func TestZabbixBackend_getCachedDatasource(t *testing.T) {
	basicDsSettings := &backend.DataSourceInstanceSettings{
		ID:       1,
		Name:     "TestDatasource",
		URL:      "http://zabbix.org/zabbix",
		JSONData: []byte("{}"),
	}

	modifiedDatasourceSettings := &backend.DataSourceInstanceSettings{
		ID:       1,
		Name:     "TestDatasource",
		URL:      "http://another.zabbix.org/zabbix",
		JSONData: []byte("{}"),
	}
	modifiedDatasource, _ := NewZabbixDatasourceInstance(modifiedDatasourceSettings)

	basicDS, _ := NewZabbixDatasourceInstance(basicDsSettings)
	dsCache := cache.NewCache(cache.NoExpiration, cache.NoExpiration)
	dsCache.Set("1-1", basicDS)

	tests := []struct {
		name          string
		cache         *cache.Cache
		pluginContext backend.PluginContext
		want          *ZabbixDatasourceInstance
	}{
		{
			name: "Uncached Datasource (nothing in cache)",
			pluginContext: backend.PluginContext{
				OrgID:                      1,
				DataSourceInstanceSettings: basicDsSettings,
			},
			want: basicDS,
		},
		{
			name:  "Cached Datasource",
			cache: dsCache,
			pluginContext: backend.PluginContext{
				OrgID:                      1,
				DataSourceInstanceSettings: basicDsSettings,
			},
			want: basicDS,
		},
		{
			name:  "Cached then modified",
			cache: dsCache,
			pluginContext: backend.PluginContext{
				OrgID:                      1,
				DataSourceInstanceSettings: modifiedDatasourceSettings,
			},
			want: modifiedDatasource,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if tt.cache == nil {
				tt.cache = cache.NewCache(cache.NoExpiration, cache.NoExpiration)
			}
			ds := &ZabbixDatasource{
				datasourceCache: tt.cache,
				logger:          log.New(),
			}
			got, _ := ds.GetDatasource(tt.pluginContext)

			// Only check the URL, as it is the easiest value to guarantee equality for
			assert.Equal(t, tt.want.zabbixAPI.GetUrl().String(), got.zabbixAPI.GetUrl().String())

			// Ensure the datasource is in the cache
			cacheds, ok := tt.cache.Get(fmt.Sprint("1-", tt.pluginContext.DataSourceInstanceSettings.ID))
			assert.Equal(t, true, ok)
			assert.Equal(t, got, cacheds)
		})
	}
}
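
Editor's note: the "1-1" key seeded into dsCache above mirrors the key format GetDatasource builds from the org ID and datasource ID. A tiny illustration; makeDSKey is a hypothetical helper, not part of the plugin.

	// Hypothetical helper mirroring fmt.Sprintf("%d-%d", pluginContext.OrgID, dsSettings.ID)
	// from GetDatasource; shown only to explain the "1-1" fixture key.
	func makeDSKey(orgID, dsID int64) string {
		return fmt.Sprintf("%d-%d", orgID, dsID)
	}
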
99
pkg/datasource/models.go
Normal file
@@ -0,0 +1,99 @@
package datasource

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

// ZabbixDatasourceSettingsDTO model
type ZabbixDatasourceSettingsDTO struct {
	Trends      bool   `json:"trends"`
	TrendsFrom  string `json:"trendsFrom"`
	TrendsRange string `json:"trendsRange"`
	CacheTTL    string `json:"cacheTTL"`

	DisableReadOnlyUsersAck bool `json:"disableReadOnlyUsersAck"`
}

// ZabbixDatasourceSettings model
type ZabbixDatasourceSettings struct {
	Trends      bool
	TrendsFrom  time.Duration
	TrendsRange time.Duration
	CacheTTL    time.Duration

	DisableReadOnlyUsersAck bool `json:"disableReadOnlyUsersAck"`
}

type ZabbixAPIResourceRequest struct {
	DatasourceId int64                  `json:"datasourceId"`
	Method       string                 `json:"method"`
	Params       map[string]interface{} `json:"params,omitempty"`
}

type ZabbixAPIRequest struct {
	Method string          `json:"method"`
	Params ZabbixAPIParams `json:"params,omitempty"`
}

func (r *ZabbixAPIRequest) String() string {
	jsonRequest, _ := json.Marshal(r.Params)
	return r.Method + string(jsonRequest)
}

type ZabbixAPIParams = map[string]interface{}

type ZabbixAPIResourceResponse struct {
	Result interface{} `json:"result,omitempty"`
}

// QueryModel model
type QueryModel struct {
	Mode        int64           `json:"mode"`
	Group       QueryFilter     `json:"group"`
	Host        QueryFilter     `json:"host"`
	Application QueryFilter     `json:"application"`
	Item        QueryFilter     `json:"item"`
	Functions   []QueryFunction `json:"functions,omitempty"`
	Options     QueryOptions    `json:"options"`

	// Direct from the gRPC interfaces
	TimeRange backend.TimeRange `json:"-"`
}

// QueryFilter model
type QueryFilter struct {
	Filter string `json:"filter"`
}

// QueryOptions model
type QueryOptions struct {
	ShowDisabledItems bool `json:"showDisabledItems"`
}

// QueryFunction model
type QueryFunction struct {
	Def    QueryFunctionDef `json:"def"`
	Params []string         `json:"params"`
	Text   string           `json:"text"`
}

// QueryFunctionDef model
type QueryFunctionDef struct {
	Name     string `json:"name"`
	Category string `json:"category"`
}

// ReadQuery reads and validates the query model from a backend.DataQuery
func ReadQuery(query backend.DataQuery) (QueryModel, error) {
	model := QueryModel{}
	if err := json.Unmarshal(query.JSON, &model); err != nil {
		return model, fmt.Errorf("could not read query: %w", err)
	}

	model.TimeRange = query.TimeRange
	return model, nil
}
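
Editor's note: to make the shape of QueryModel concrete, here is a hedged example of the JSON a metrics query could carry, matching the struct tags above; the filter values are illustrative, not taken from the commit.

	// Editor's sketch (not part of the commit): an illustrative query payload in the
	// shape ReadQuery expects.
	var exampleQueryJSON = []byte(`{
		"mode": 0,
		"group": {"filter": "/.*/"},
		"host": {"filter": "Zabbix server"},
		"application": {"filter": ""},
		"item": {"filter": "/CPU/"},
		"functions": [],
		"options": {"showDisabledItems": false}
	}`)

	// ReadQuery(backend.DataQuery{JSON: exampleQueryJSON, TimeRange: timeRange}) yields a
	// QueryModel with Mode 0 (metrics), which QueryData then passes to queryNumericItems.
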
84
pkg/datasource/resource_handler.go
Normal file
@@ -0,0 +1,84 @@
package datasource

import (
	"encoding/json"
	"errors"
	"io/ioutil"
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
)

func (ds *ZabbixDatasource) RootHandler(rw http.ResponseWriter, req *http.Request) {
	ds.logger.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)

	rw.WriteHeader(http.StatusOK)
	rw.Write([]byte("Hello from Zabbix data source!"))
}

func (ds *ZabbixDatasource) ZabbixAPIHandler(rw http.ResponseWriter, req *http.Request) {
	if req.Method != http.MethodPost {
		return
	}

	body, err := ioutil.ReadAll(req.Body)
	defer req.Body.Close()
	if err != nil || len(body) == 0 {
		if err == nil {
			err = errors.New("request body is empty")
		}
		writeError(rw, http.StatusBadRequest, err)
		return
	}

	var reqData ZabbixAPIResourceRequest
	err = json.Unmarshal(body, &reqData)
	if err != nil {
		ds.logger.Error("Cannot unmarshal request", "error", err.Error())
		writeError(rw, http.StatusInternalServerError, err)
		return
	}

	pluginCxt := httpadapter.PluginConfigFromContext(req.Context())
	dsInstance, err := ds.GetDatasource(pluginCxt)
	if err != nil {
		ds.logger.Error("Error loading datasource", "error", err)
		writeError(rw, http.StatusInternalServerError, err)
		return
	}

	apiReq := &ZabbixAPIRequest{Method: reqData.Method, Params: reqData.Params}

	result, err := dsInstance.ZabbixAPIQuery(req.Context(), apiReq)
	if err != nil {
		ds.logger.Error("Zabbix API request error", "error", err)
		writeError(rw, http.StatusInternalServerError, err)
		return
	}

	writeResponse(rw, result)
}

func writeResponse(rw http.ResponseWriter, result *ZabbixAPIResourceResponse) {
	resultJson, err := json.Marshal(*result)
	if err != nil {
		writeError(rw, http.StatusInternalServerError, err)
		return
	}

	rw.Header().Add("Content-Type", "application/json")
	rw.WriteHeader(http.StatusOK)
	rw.Write(resultJson)
}

func writeError(rw http.ResponseWriter, statusCode int, err error) {
	data := make(map[string]interface{})

	data["error"] = http.StatusText(statusCode)
	data["message"] = err.Error()

	var b []byte
	if b, err = json.Marshal(data); err != nil {
		rw.WriteHeader(statusCode)
		return
	}

	rw.Header().Add("Content-Type", "application/json")
	rw.WriteHeader(statusCode)
	rw.Write(b)
}
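
Editor's note: ZabbixAPIHandler expects a POST body in the ZabbixAPIResourceRequest shape defined in models.go. A hedged example of such a request (method and parameter values are illustrative):

	// Editor's sketch (not part of the commit): a request body the handler above
	// would accept.
	var exampleResourceRequest = ZabbixAPIResourceRequest{
		DatasourceId: 1,
		Method:       "item.get",
		Params: map[string]interface{}{
			"output":  []string{"itemid", "name"},
			"hostids": []string{"10084"},
		},
	}

	// json.Marshal(exampleResourceRequest) produces:
	// {"datasourceId":1,"method":"item.get","params":{"hostids":["10084"],"output":["itemid","name"]}}
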
95
pkg/datasource/response_models.go
Normal file
@@ -0,0 +1,95 @@
package datasource

import (
	"fmt"
	"strings"
)

type Items []Item

type Item struct {
	ID        string     `json:"itemid,omitempty"`
	Key       string     `json:"key_,omitempty"`
	Name      string     `json:"name,omitempty"`
	ValueType int        `json:"value_type,omitempty,string"`
	HostID    string     `json:"hostid,omitempty"`
	Hosts     []ItemHost `json:"hosts,omitempty"`
	Status    string     `json:"status,omitempty"`
	State     string     `json:"state,omitempty"`
}

// ExpandItem replaces $1..$N macros in the item name with the corresponding
// parameters from the item key
func (item *Item) ExpandItem() string {
	name := item.Name
	key := item.Key

	if strings.Index(key, "[") == -1 {
		return name
	}

	keyRunes := []rune(item.Key)
	keyParamsStr := string(keyRunes[strings.Index(key, "[")+1 : strings.LastIndex(key, "]")])
	keyParams := splitKeyParams(keyParamsStr)

	for i := len(keyParams); i >= 1; i-- {
		name = strings.ReplaceAll(name, fmt.Sprintf("$%v", i), keyParams[i-1])
	}

	return name
}

// splitKeyParams splits an item key parameter string on commas, respecting
// quoted values and bracketed arrays
func splitKeyParams(paramStr string) []string {
	paramRunes := []rune(paramStr)
	params := []string{}
	quoted := false
	inArray := false
	splitSymbol := ","
	param := ""

	for _, r := range paramRunes {
		symbol := string(r)
		if symbol == `"` && inArray {
			param += symbol
		} else if symbol == `"` && quoted {
			quoted = false
		} else if symbol == `"` && !quoted {
			quoted = true
		} else if symbol == "[" && !quoted {
			inArray = true
		} else if symbol == "]" && !quoted {
			inArray = false
		} else if symbol == splitSymbol && !quoted && !inArray {
			params = append(params, param)
			param = ""
		} else {
			param += symbol
		}
	}

	params = append(params, param)
	return params
}

type ItemHost struct {
	ID   string `json:"hostid,omitempty"`
	Name string `json:"name,omitempty"`
}

type Trend []TrendPoint

type TrendPoint struct {
	ItemID   string `json:"itemid,omitempty"`
	Clock    int64  `json:"clock,omitempty,string"`
	Num      string `json:"num,omitempty"`
	ValueMin string `json:"value_min,omitempty"`
	ValueAvg string `json:"value_avg,omitempty"`
	ValueMax string `json:"value_max,omitempty"`
}

type History []HistoryPoint

type HistoryPoint struct {
	ItemID string  `json:"itemid,omitempty"`
	Clock  int64   `json:"clock,omitempty,string"`
	Value  float64 `json:"value,omitempty,string"`
	NS     int64   `json:"ns,omitempty,string"`
}
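
Editor's note: an illustration of ExpandItem, assumed to live in package datasource and not part of the commit. Zabbix item names may reference key parameters as $1..$N, e.g. the stock item name "CPU $2 time" with key system.cpu.util[,user,avg1]:

	// Editor's sketch: $N macros in the name resolve to the N-th key parameter.
	func exampleExpandItem() string {
		item := &Item{
			Name: "CPU $2 time",
			Key:  "system.cpu.util[,user,avg1]",
		}
		// splitKeyParams returns ["", "user", "avg1"], so $2 resolves to "user"
		// and the expanded name is "CPU user time".
		return item.ExpandItem()
	}
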
534
pkg/datasource/zabbix.go
Normal file
@@ -0,0 +1,534 @@
package datasource

import (
	"encoding/json"
	"fmt"
	"regexp"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"
	simplejson "github.com/bitly/go-simplejson"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"golang.org/x/net/context"
)

// CachedMethods lists the Zabbix API methods whose results are cached
var CachedMethods = map[string]bool{
	"hostgroup.get":   true,
	"host.get":        true,
	"application.get": true,
	"item.get":        true,
	"service.get":     true,
	"usermacro.get":   true,
	"proxy.get":       true,
}

// ZabbixQuery handles query requests to Zabbix
func (ds *ZabbixDatasourceInstance) ZabbixQuery(ctx context.Context, apiReq *ZabbixAPIRequest) (*simplejson.Json, error) {
	var resultJson *simplejson.Json
	var err error
	requestHash := cache.HashString(apiReq.String())

	cachedResult, queryExistInCache := ds.queryCache.Get(requestHash)
	if !queryExistInCache {
		resultJson, err = ds.ZabbixRequest(ctx, apiReq.Method, apiReq.Params)
		if err != nil {
			return nil, err
		}

		if _, ok := CachedMethods[apiReq.Method]; ok {
			ds.logger.Debug("Write result to cache", "method", apiReq.Method)
			ds.queryCache.Set(requestHash, resultJson)
		}
	} else {
		var ok bool
		resultJson, ok = cachedResult.(*simplejson.Json)
		if !ok {
			resultJson = simplejson.New()
		}
	}

	return resultJson, nil
}

// ZabbixAPIQuery handles query requests to Zabbix API
func (ds *ZabbixDatasourceInstance) ZabbixAPIQuery(ctx context.Context, apiReq *ZabbixAPIRequest) (*ZabbixAPIResourceResponse, error) {
	resultJson, err := ds.ZabbixQuery(ctx, apiReq)
	if err != nil {
		return nil, err
	}
	result := resultJson.Interface()
	return BuildAPIResponse(&result)
}

func BuildAPIResponse(responseData *interface{}) (*ZabbixAPIResourceResponse, error) {
	return &ZabbixAPIResourceResponse{
		Result: *responseData,
	}, nil
}

// TestConnection checks authentication and version of the Zabbix API and returns that info
func (ds *ZabbixDatasourceInstance) TestConnection(ctx context.Context) (string, error) {
	_, err := ds.getAllGroups(ctx)
	if err != nil {
		return "", err
	}

	response, err := ds.ZabbixRequest(ctx, "apiinfo.version", ZabbixAPIParams{})
	if err != nil {
		return "", err
	}

	resultByte, _ := response.MarshalJSON()
	ds.logger.Debug("TestConnection", "result", string(resultByte))

	return string(resultByte), nil
}

// ZabbixRequest checks authentication and makes a request to the Zabbix API
func (ds *ZabbixDatasourceInstance) ZabbixRequest(ctx context.Context, method string, params ZabbixAPIParams) (*simplejson.Json, error) {
	ds.logger.Debug("Invoke Zabbix API request", "ds", ds.dsInfo.Name, "method", method)
	var result *simplejson.Json
	var err error

	// Skip auth for methods that do not require it
	if method == "apiinfo.version" {
		return ds.zabbixAPI.RequestUnauthenticated(ctx, method, params)
	}

	result, err = ds.zabbixAPI.Request(ctx, method, params)
	if err == zabbixapi.ErrNotAuthenticated {
		err = ds.login(ctx)
		if err != nil {
			return nil, err
		}
		return ds.ZabbixRequest(ctx, method, params)
	} else if err != nil {
		return nil, err
	}

	return result, err
}

func (ds *ZabbixDatasourceInstance) login(ctx context.Context) error {
	jsonData, err := simplejson.NewJson(ds.dsInfo.JSONData)
	if err != nil {
		return err
	}

	zabbixLogin := jsonData.Get("username").MustString()
	var zabbixPassword string
	if securePassword, exists := ds.dsInfo.DecryptedSecureJSONData["password"]; exists {
		zabbixPassword = securePassword
	} else {
		// Fallback to the unencrypted password from jsonData
		zabbixPassword = jsonData.Get("password").MustString()
	}

	err = ds.zabbixAPI.Authenticate(ctx, zabbixLogin, zabbixPassword)
	if err != nil {
		ds.logger.Error("Zabbix authentication error", "error", err)
		return err
	}
	ds.logger.Debug("Successfully authenticated", "url", ds.zabbixAPI.GetUrl().String(), "user", zabbixLogin)

	return nil
}
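
// Editor's sketch (not part of the commit): login above reads the username from
// jsonData and prefers the decrypted secureJsonData password, falling back to a
// plain-text jsonData password. Illustrative settings value satisfying that path:
var exampleLoginSettings = &backend.DataSourceInstanceSettings{
	URL:      "http://zabbix.org/zabbix",
	JSONData: []byte(`{"username": "grafana"}`),
	DecryptedSecureJSONData: map[string]string{
		"password": "s3cret", // placeholder value
	},
}
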
func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, query *QueryModel) (*data.Frame, error) {
	groupFilter := query.Group.Filter
	hostFilter := query.Host.Filter
	appFilter := query.Application.Filter
	itemFilter := query.Item.Filter

	items, err := ds.getItems(ctx, groupFilter, hostFilter, appFilter, itemFilter, "num")
	if err != nil {
		return nil, err
	}

	frames, err := ds.queryNumericDataForItems(ctx, query, items)
	if err != nil {
		return nil, err
	}

	return frames, nil
}

func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) (Items, error) {
	hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
	if err != nil {
		return nil, err
	}
	var hostids []string
	for _, k := range hosts {
		hostids = append(hostids, k["hostid"].(string))
	}

	apps, err := ds.getApps(ctx, groupFilter, hostFilter, appFilter)
	if err != nil {
		return nil, err
	}
	var appids []string
	for _, l := range apps {
		appids = append(appids, l["applicationid"].(string))
	}

	var allItems *simplejson.Json
	if len(hostids) > 0 {
		allItems, err = ds.getAllItems(ctx, hostids, nil, itemType)
	} else if len(appids) > 0 {
		allItems, err = ds.getAllItems(ctx, nil, appids, itemType)
	}

	var items Items

	if allItems == nil {
		items = Items{}
	} else {
		itemsJSON, err := allItems.MarshalJSON()
		if err != nil {
			return nil, err
		}

		err = json.Unmarshal(itemsJSON, &items)
		if err != nil {
			return nil, err
		}
	}

	re, err := parseFilter(itemFilter)
	if err != nil {
		return nil, err
	}

	filteredItems := Items{}
	for _, item := range items {
		itemName := item.ExpandItem()
		if item.Status == "0" {
			if re != nil {
				if re.MatchString(itemName) {
					filteredItems = append(filteredItems, item)
				}
			} else if itemName == itemFilter {
				filteredItems = append(filteredItems, item)
			}
		}
	}
	return filteredItems, nil
}

func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, groupFilter string, hostFilter string, appFilter string) ([]map[string]interface{}, error) {
	hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
	if err != nil {
		return nil, err
	}
	var hostids []string
	for _, k := range hosts {
		hostids = append(hostids, k["hostid"].(string))
	}
	allApps, err := ds.getAllApps(ctx, hostids)
	if err != nil {
		return nil, err
	}

	re, err := parseFilter(appFilter)
	if err != nil {
		return nil, err
	}

	var apps []map[string]interface{}
	for _, i := range allApps.MustArray() {
		name := i.(map[string]interface{})["name"].(string)
		if re != nil {
			if re.MatchString(name) {
				apps = append(apps, i.(map[string]interface{}))
			}
		} else if name == appFilter {
			apps = append(apps, i.(map[string]interface{}))
		}
	}
	return apps, nil
}

func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, groupFilter string, hostFilter string) ([]map[string]interface{}, error) {
	groups, err := ds.getGroups(ctx, groupFilter)
	if err != nil {
		return nil, err
	}
	var groupids []string
	for _, k := range groups {
		groupids = append(groupids, k["groupid"].(string))
	}
	allHosts, err := ds.getAllHosts(ctx, groupids)
	if err != nil {
		return nil, err
	}

	re, err := parseFilter(hostFilter)
	if err != nil {
		return nil, err
	}

	var hosts []map[string]interface{}
	for _, i := range allHosts.MustArray() {
		name := i.(map[string]interface{})["name"].(string)
		if re != nil {
			if re.MatchString(name) {
				hosts = append(hosts, i.(map[string]interface{}))
			}
		} else if name == hostFilter {
			hosts = append(hosts, i.(map[string]interface{}))
		}
	}

	return hosts, nil
}

func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, groupFilter string) ([]map[string]interface{}, error) {
	allGroups, err := ds.getAllGroups(ctx)
	if err != nil {
		return nil, err
	}
	re, err := parseFilter(groupFilter)
	if err != nil {
		return nil, err
	}

	var groups []map[string]interface{}
	for _, i := range allGroups.MustArray() {
		name := i.(map[string]interface{})["name"].(string)
		if re != nil {
			if re.MatchString(name) {
				groups = append(groups, i.(map[string]interface{}))
			}
		} else if name == groupFilter {
			groups = append(groups, i.(map[string]interface{}))
		}
	}
	return groups, nil
}

func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, hostids []string, appids []string, itemtype string) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":         []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state"},
		"sortfield":      "name",
		"webitems":       true,
		"filter":         map[string]interface{}{},
		"selectHosts":    []string{"hostid", "name"},
		"hostids":        hostids,
		"applicationids": appids,
	}

	filter := params["filter"].(map[string]interface{})
	if itemtype == "num" {
		filter["value_type"] = []int{0, 3}
	} else if itemtype == "text" {
		filter["value_type"] = []int{1, 2, 4}
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "item.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) getAllApps(ctx context.Context, hostids []string) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":  "extend",
		"hostids": hostids,
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "application.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) getAllHosts(ctx context.Context, groupids []string) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":    []string{"name", "host"},
		"sortfield": "name",
		"groupids":  groupids,
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "host.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) getAllGroups(ctx context.Context) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":     []string{"name"},
		"sortfield":  "name",
		"real_hosts": true,
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "hostgroup.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, query *QueryModel, items Items) (*data.Frame, error) {
	valueType := ds.getTrendValueType(query)
	consolidateBy := ds.getConsolidateBy(query)

	if consolidateBy == "" {
		consolidateBy = valueType
	}

	history, err := ds.getHistotyOrTrend(ctx, query, items)
	if err != nil {
		return nil, err
	}

	return convertHistory(history, items), nil
}

func (ds *ZabbixDatasourceInstance) getTrendValueType(query *QueryModel) string {
	trendValue := "avg"

	for _, fn := range query.Functions {
		if fn.Def.Name == "trendValue" && len(fn.Params) > 0 {
			trendValue = fn.Params[0]
		}
	}

	return trendValue
}

func (ds *ZabbixDatasourceInstance) getConsolidateBy(query *QueryModel) string {
	consolidateBy := "avg"

	for _, fn := range query.Functions {
		if fn.Def.Name == "consolidateBy" && len(fn.Params) > 0 {
			consolidateBy = fn.Params[0]
		}
	}
	return consolidateBy
}

func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, query *QueryModel, items Items) (History, error) {
	timeRange := query.TimeRange
	useTrend := ds.isUseTrend(timeRange)
	allHistory := History{}

	groupedItems := map[int]Items{}

	for _, j := range items {
		groupedItems[j.ValueType] = append(groupedItems[j.ValueType], j)
	}

	for k, l := range groupedItems {
		var itemids []string
		for _, m := range l {
			itemids = append(itemids, m.ID)
		}

		params := ZabbixAPIParams{
			"output":    "extend",
			"sortfield": "clock",
			"sortorder": "ASC",
			"itemids":   itemids,
			"time_from": timeRange.From.Unix(),
			"time_till": timeRange.To.Unix(),
		}

		var response *simplejson.Json
		var err error
		if useTrend {
			response, err = ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "trend.get", Params: params})
		} else {
			params["history"] = &k
			response, err = ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "history.get", Params: params})
		}

		if err != nil {
			return nil, err
		}

		pointJSON, err := response.MarshalJSON()
		if err != nil {
			return nil, fmt.Errorf("Internal error parsing response JSON: %w", err)
		}

		history := History{}
		err = json.Unmarshal(pointJSON, &history)
		if err != nil {
			ds.logger.Warn(fmt.Sprintf("Could not map Zabbix response to History: %s", err.Error()))
		} else {
			allHistory = append(allHistory, history...)
		}
	}
	return allHistory, nil
}

func (ds *ZabbixDatasourceInstance) isUseTrend(timeRange backend.TimeRange) bool {
	if !ds.Settings.Trends {
		return false
	}

	trendsFrom := ds.Settings.TrendsFrom
	trendsRange := ds.Settings.TrendsRange
	fromSec := timeRange.From.Unix()
	toSec := timeRange.To.Unix()
	rangeSec := float64(toSec - fromSec)

	if (fromSec < time.Now().Add(-trendsFrom).Unix()) || (rangeSec > trendsRange.Seconds()) {
		return true
	}
	return false
}
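
// Editor's sketch (not part of the commit): a worked example of isUseTrend with
// Trends enabled and the default TrendsFrom=7d / TrendsRange=4d settings. A query
// over the last 30 days starts before now-7d (and spans more than 4d), so
// getHistotyOrTrend calls trend.get instead of history.get.
func exampleIsUseTrend(ds *ZabbixDatasourceInstance) bool {
	tr := backend.TimeRange{
		From: time.Now().Add(-30 * 24 * time.Hour),
		To:   time.Now(),
	}
	return ds.isUseTrend(tr) // true under the assumptions above
}
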
func convertHistory(history History, items Items) *data.Frame {
	timeField := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
	timeField.Name = "time"
	frame := data.NewFrame("History", timeField)

	for _, item := range items {
		field := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
		if len(item.Hosts) > 0 {
			field.Name = fmt.Sprintf("%s: %s", item.Hosts[0].Name, item.ExpandItem())
		} else {
			field.Name = item.ExpandItem()
		}
		frame.Fields = append(frame.Fields, field)
	}

	for _, point := range history {
		for columnIndex, field := range frame.Fields {
			if columnIndex == 0 {
				ts := time.Unix(point.Clock, point.NS)
				field.Append(ts)
			} else {
				item := items[columnIndex-1]
				if point.ItemID == item.ID {
					value := point.Value
					field.Append(&value)
				} else {
					field.Append(nil)
				}
			}
		}
	}

	// TODO: convert to wide format
	wideFrame, err := data.LongToWide(frame, &data.FillMissing{Mode: data.FillModeNull})
	if err == nil {
		return wideFrame
	}
	return frame
}

// parseFilter compiles filters of the form /pattern/flags into a regexp.
// Plain strings return a nil regexp, which callers treat as an exact match.
func parseFilter(filter string) (*regexp.Regexp, error) {
	regex := regexp.MustCompile(`^/(.+)/(.*)$`)
	flagRE := regexp.MustCompile("[imsU]+")

	matches := regex.FindStringSubmatch(filter)
	if len(matches) <= 1 {
		return nil, nil
	}

	pattern := ""
	if matches[2] != "" {
		if flagRE.MatchString(matches[2]) {
			pattern += "(?" + matches[2] + ")"
		} else {
			return nil, fmt.Errorf("error parsing regexp: unsupported flags `%s` (expected [imsU])", matches[2])
		}
	}
	pattern += matches[1]

	return regexp.Compile(pattern)
}
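
Editor's note: a short sketch of how the /pattern/flags convention in parseFilter plays out; the filter strings are illustrative and the snippet is assumed to live in package datasource, not in the commit.

	// Editor's sketch: "/.../" filters become regexps, anything else is matched
	// literally by the calling functions.
	func exampleParseFilter() {
		re, _ := parseFilter("/^Zabbix server/i")      // case-insensitive regexp ^Zabbix server
		fmt.Println(re.MatchString("zabbix server A")) // true

		plain, _ := parseFilter("Zabbix servers") // nil: callers fall back to name == filter
		fmt.Println(plain == nil)                 // true
	}
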
134
pkg/datasource/zabbix_test.go
Normal file
@@ -0,0 +1,134 @@
package datasource

import (
	"context"
	"net/http"
	"testing"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
	"github.com/stretchr/testify/assert"
)

var emptyParams = map[string]interface{}{}

type RoundTripFunc func(req *http.Request) *http.Response

func (f RoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
	return f(req), nil
}

// NewTestClient returns *http.Client with Transport replaced to avoid making real calls
func NewTestClient(fn RoundTripFunc) *http.Client {
	return &http.Client{
		Transport: RoundTripFunc(fn),
	}
}

var basicDatasourceInfo = &backend.DataSourceInstanceSettings{
	ID:       1,
	Name:     "TestDatasource",
	URL:      "http://zabbix.org/zabbix",
	JSONData: []byte(`{"username":"username", "password":"password"}`),
}

func mockZabbixQuery(method string, params ZabbixAPIParams) *ZabbixAPIRequest {
	return &ZabbixAPIRequest{
		Method: method,
		Params: params,
	}
}

func MockZabbixDataSource(body string, statusCode int) *ZabbixDatasourceInstance {
	zabbixAPI, _ := zabbixapi.MockZabbixAPI(body, statusCode)
	zabbixSettings, _ := readZabbixSettings(basicDatasourceInfo)

	return &ZabbixDatasourceInstance{
		dsInfo:     basicDatasourceInfo,
		zabbixAPI:  zabbixAPI,
		Settings:   zabbixSettings,
		queryCache: cache.NewCache(cache.NoExpiration, 10*time.Minute),
		logger:     log.New(),
	}
}

func MockZabbixDataSourceResponse(dsInstance *ZabbixDatasourceInstance, body string, statusCode int) *ZabbixDatasourceInstance {
	zabbixAPI, _ := zabbixapi.MockZabbixAPI(body, statusCode)
	dsInstance.zabbixAPI = zabbixAPI

	return dsInstance
}

func TestLogin(t *testing.T) {
	dsInstance := MockZabbixDataSource(`{"result":"secretauth"}`, 200)
	err := dsInstance.login(context.Background())

	assert.Nil(t, err)
	assert.Equal(t, "secretauth", dsInstance.zabbixAPI.GetAuth())
}

func TestLoginError(t *testing.T) {
	dsInstance := MockZabbixDataSource(`{"result":""}`, 500)
	err := dsInstance.login(context.Background())

	assert.NotNil(t, err)
	assert.Equal(t, "", dsInstance.zabbixAPI.GetAuth())
}

func TestZabbixAPIQuery(t *testing.T) {
	dsInstance := MockZabbixDataSource(`{"result":"test"}`, 200)
	resp, err := dsInstance.ZabbixAPIQuery(context.Background(), mockZabbixQuery("test.get", emptyParams))

	assert.Nil(t, err)

	result, ok := resp.Result.(string)
	assert.True(t, ok)
	assert.Equal(t, "test", result)
}

func TestCachedQuery(t *testing.T) {
	// Using a method with caching enabled
	query := mockZabbixQuery("host.get", emptyParams)
	dsInstance := MockZabbixDataSource(`{"result":"testOld"}`, 200)

	// Run query first time
	resp, err := dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ := resp.Result.(string)
	assert.Equal(t, "testOld", result)

	// Mock request with new value
	dsInstance = MockZabbixDataSourceResponse(dsInstance, `{"result":"testNew"}`, 200)
	// Should not run an actual API query and should return the cached result
	resp, err = dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ = resp.Result.(string)
	assert.Equal(t, "testOld", result)
}

func TestNonCachedQuery(t *testing.T) {
	// Using a method with caching disabled
	query := mockZabbixQuery("history.get", emptyParams)
	dsInstance := MockZabbixDataSource(`{"result":"testOld"}`, 200)

	// Run query first time
	resp, err := dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ := resp.Result.(string)
	assert.Equal(t, "testOld", result)

	// Mock request with new value
	dsInstance = MockZabbixDataSourceResponse(dsInstance, `{"result":"testNew"}`, 200)
	// Should run the actual API query and return the new result
	resp, err = dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ = resp.Result.(string)
	assert.Equal(t, "testNew", result)
}
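
Editor's note: the split between TestCachedQuery and TestNonCachedQuery comes from the CachedMethods map in zabbix.go; a small sketch of that lookup, not part of the commit.

	// Editor's sketch: host.get results are served from the query cache on the
	// second call, while history.get always goes back to the (mocked) API.
	func exampleIsCached() (bool, bool) {
		_, hostCached := CachedMethods["host.get"]       // true
		_, historyCached := CachedMethods["history.get"] // false
		return hostCached, historyCached
	}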