Merge pull request #1256 from alexanderzobnin/backend-functions
Implement data processing functions on the backend
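For orientation: a "data processing function" here is a transform applied server-side to a time series before it is returned to the frontend. The sketch below is illustrative only; the names Timepoint and MovingAverage are hypothetical and not taken from this commit:

package main

import "fmt"

// Timepoint is a hypothetical stand-in for one sample of a Zabbix time series.
type Timepoint struct {
	Time  int64 // unix timestamp, seconds
	Value float64
}

// MovingAverage smooths a series with a trailing window of n points.
// It is a minimal example of the kind of processing that can run on the
// backend instead of in the browser.
func MovingAverage(points []Timepoint, n int) []Timepoint {
	if n < 1 {
		n = 1
	}
	out := make([]Timepoint, 0, len(points))
	sum := 0.0
	for i, p := range points {
		sum += p.Value
		if i >= n {
			sum -= points[i-n].Value // drop the sample that left the window
		}
		width := i + 1
		if width > n {
			width = n
		}
		out = append(out, Timepoint{Time: p.Time, Value: sum / float64(width)})
	}
	return out
}

func main() {
	series := []Timepoint{{1, 1}, {2, 2}, {3, 3}, {4, 4}}
	fmt.Println(MovingAverage(series, 2)) // [{1 1} {2 1.5} {3 2.5} {4 3.5}]
}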
Makefile (27 lines changed)
@@ -5,22 +5,27 @@ install:
 	# Frontend
 	yarn install --pure-lockfile
 	# Backend
-	go mod vendor
 	go install -v ./pkg/
+	GO111MODULE=off go get -u golang.org/x/lint/golint

+deps-go:
+	go install -v ./pkg/
+
 build: build-frontend build-backend
 build-frontend:
 	yarn dev-build
 build-backend:
-	env GOOS=linux go build -mod=vendor -o ./dist/zabbix-plugin_linux_amd64 ./pkg
+	env GOOS=linux go build -o ./dist/zabbix-plugin_linux_amd64 ./pkg
 build-debug:
-	env GOOS=linux go build -mod=vendor -gcflags=all="-N -l" -o ./dist/zabbix-plugin_linux_amd64 ./pkg
+	env GOOS=linux go build -gcflags="all=-N -l" -o ./dist/zabbix-plugin_linux_amd64 ./pkg

 # Build for specific platform
 build-backend-windows: extension = .exe
+build-backend-darwin-arm64:
+	env GOOS=darwin GOARCH=arm64 go build -o ./dist/zabbix-plugin_darwin_arm64 ./pkg
 build-backend-%:
 	$(eval filename = zabbix-plugin_$*_amd64$(extension))
-	env GOOS=$* GOARCH=amd64 go build -mod=vendor -o ./dist/$(filename) ./pkg
+	env GOOS=$* GOARCH=amd64 go build -o ./dist/$(filename) ./pkg

 run-frontend:
 	yarn install --pure-lockfile
@@ -40,27 +45,29 @@ dist-backend: dist-backend-linux dist-backend-darwin dist-backend-windows dist-a
 dist-backend-windows: extension = .exe
 dist-backend-%:
 	$(eval filename = zabbix-plugin_$*_amd64$(extension))
-	env GOOS=$* GOARCH=amd64 go build -ldflags="-s -w" -mod=vendor -o ./dist/$(filename) ./pkg
+	env GOOS=$* GOARCH=amd64 go build -ldflags="-s -w" -o ./dist/$(filename) ./pkg

 # ARM
 dist-arm: dist-arm-linux-arm-v6 dist-arm-linux-arm64
 dist-arm-linux-arm-v6:
-	env GOOS=linux GOARCH=arm GOARM=6 go build -ldflags="-s -w" -mod=vendor -o ./dist/zabbix-plugin_linux_arm ./pkg
+	env GOOS=linux GOARCH=arm GOARM=6 go build -ldflags="-s -w" -o ./dist/zabbix-plugin_linux_arm ./pkg
 dist-arm-linux-arm-v7:
-	env GOOS=linux GOARCH=arm GOARM=7 go build -ldflags="-s -w" -mod=vendor -o ./dist/zabbix-plugin_linux_arm ./pkg
+	env GOOS=linux GOARCH=arm GOARM=7 go build -ldflags="-s -w" -o ./dist/zabbix-plugin_linux_arm ./pkg
 dist-arm-linux-arm64:
-	env GOOS=linux GOARCH=arm64 go build -ldflags="-s -w" -mod=vendor -o ./dist/zabbix-plugin_linux_arm64 ./pkg
+	env GOOS=linux GOARCH=arm64 go build -ldflags="-s -w" -o ./dist/zabbix-plugin_linux_arm64 ./pkg
+dist-arm-darwin-arm64:
+	env GOOS=darwin GOARCH=arm64 go build -ldflags="-s -w" -o ./dist/zabbix-plugin_darwin_arm64 ./pkg

 .PHONY: test
 test: test-frontend test-backend
 test-frontend:
 	yarn test
 test-backend:
-	go test -mod=vendor ./pkg/...
+	go test ./pkg/...
 test-ci:
 	yarn ci-test
 	mkdir -p tmp/coverage/golang/
-	go test -race -coverprofile=tmp/coverage/golang/coverage.txt -covermode=atomic -mod=vendor ./pkg/...
+	go test -race -coverprofile=tmp/coverage/golang/coverage.txt -covermode=atomic ./pkg/...

 .PHONY: clean
 clean:
@@ -7,6 +7,13 @@ fi
 PORT="${2:-3222}"
 PLUGIN_NAME="${1:-zabbix-plugin_}"
 
+# Build optimized for debug
+make build-debug
+
 # Reload plugin
 pkill ${PLUGIN_NAME}
 sleep 2
+
+if [ "$OSTYPE" == "linux-gnu" ]; then
+	ptrace_scope=`cat /proc/sys/kernel/yama/ptrace_scope`
+	if [ "$ptrace_scope" != 0 ]; then
go.mod (10 lines changed)
@@ -1,14 +1,14 @@
 module github.com/alexanderzobnin/grafana-zabbix
 
-go 1.12
+go 1.15
 
 require (
 	github.com/bitly/go-simplejson v0.5.0
 	github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
-	github.com/grafana/grafana-plugin-sdk-go v0.65.0
-	github.com/hashicorp/go-hclog v0.9.2 // indirect
+	github.com/grafana/grafana-plugin-sdk-go v0.98.1
+	github.com/hashicorp/go-hclog v0.16.1 // indirect
 	github.com/patrickmn/go-cache v2.1.0+incompatible
-	github.com/stretchr/testify v1.5.1
-	golang.org/x/net v0.0.0-20190923162816-aa69164e4478
+	github.com/stretchr/testify v1.7.0
+	golang.org/x/net v0.0.0-20210510120150-4163338589ed
 	gotest.tools v2.2.0+incompatible
 )
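The grafana-plugin-sdk-go bump from v0.65.0 to v0.98.1 matters because the backend functions are served to Grafana through this SDK. Below is a minimal sketch of the handler shape involved, assuming the SDK's backend.QueryDataHandler interface as it exists in the v0.98.x line (illustrative; not code from this commit):

package datasource

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

// zabbixDatasource is a hypothetical receiver type; the real plugin wires
// its own datasource through the SDK's serve helpers.
type zabbixDatasource struct{}

// Compile-time check that the sketch satisfies the SDK interface.
var _ backend.QueryDataHandler = (*zabbixDatasource)(nil)

// QueryData answers each query in the request with a DataResponse keyed
// by the query's RefID. Server-side processing (the functions this PR
// moves to the backend) would run here before frames are returned.
func (d *zabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()
	for _, q := range req.Queries {
		resp.Responses[q.RefID] = backend.DataResponse{}
	}
	return resp, nil
}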
go.sum (416 lines changed)
@@ -1,207 +1,529 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/apache/arrow/go/arrow v0.0.0-20200403134915-89ce1cadb678 h1:R72+9UXiP7TnpTAdznM1okjzyqb3bzopSA7HCP7p3gM=
github.com/apache/arrow/go/arrow v0.0.0-20200403134915-89ce1cadb678/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/apache/arrow/go/arrow v0.0.0-20210223225224-5bea62493d91 h1:rbe942bXzd2vnds4y9fYQL8X4yFltXoZsKW7KtG+TFM=
github.com/apache/arrow/go/arrow v0.0.0-20210223225224-5bea62493d91/go.mod h1:c9sxoIT3YgLxH4UhLOCKaBlEojuMhVYpk4Ntv3opUTQ=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-simplejson v0.5.0 h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=
github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cheekybits/genny v1.0.0 h1:uGGa4nei+j20rOSeDeP5Of12XVm7TGUd4dJA9RDitfE=
github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wXkRAgjxjQ=
github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.4 h1:87PNWwrRvUSnqS4dlcBU/ftvOIBep4sYuBLlh6rX2wk=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.1 h1:jAbXjIeW2ZSW2AwFxlGTDoc2CjI2XujLkV3ArsZFCvc=
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/flatbuffers v1.11.0 h1:O7CEyB8Cb3/DmtxODGtLHcEvpr81Jm5qLg/hsHnxA2A=
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/grafana/grafana-plugin-sdk-go v0.65.0 h1:l6cPKCFxf3AN3gd7Sprum2TuhcqsGI98Xa/1dDuin9E=
github.com/grafana/grafana-plugin-sdk-go v0.65.0/go.mod h1:w855JyiC5PDP3naWUJP0h/vY8RlzlE4+4fodyoXph+4=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/grafana/grafana-plugin-sdk-go v0.98.1 h1:Q/OGVdacBv/IdptTfwSsw54UuhriFun+b1Pyup+VErk=
github.com/grafana/grafana-plugin-sdk-go v0.98.1/go.mod h1:D7x3ah+1d4phNXpbnOaxa/osSaZlwh9/ZUnGGzegRbk=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw=
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd h1:rNuUHR+CvK1IS89MMtcF0EpcVMZtjKfPRp4MEmt/aTs=
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE=
github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-hclog v0.16.1 h1:IVQwpTGNRRIHafnTs2dQLIk4ENtneRIEEJWOVDqz99o=
github.com/hashicorp/go-hclog v0.16.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-plugin v1.2.2 h1:mgDpq0PkoK5gck2w4ivaMpWRHv/matdOR4xmeScmf/w=
github.com/hashicorp/go-plugin v1.2.2/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYtXdgmf1AVNs0=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE=
github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/magefile/mage v1.9.0 h1:t3AU2wNwehMCW97vuqQLtw6puppWXHO+O2MHo5a50XE=
github.com/magefile/mage v1.9.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
github.com/mattetti/filebuffer v1.0.0 h1:ixTvQ0JjBTwWbdpDZ98lLrydo7KRi8xNRIi5RFszsbY=
github.com/mattetti/filebuffer v1.0.0/go.mod h1:X6nyAIge2JGVmuJt2MFCqmHrb/5IHiphfHtot0s5cnI=
github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
github.com/magefile/mage v1.11.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
github.com/mattetti/filebuffer v1.0.1 h1:gG7pyfnSIZCxdoKq+cPa8T0hhYtD9NxCdI4D7PTjRLM=
github.com/mattetti/filebuffer v1.0.1/go.mod h1:YdMURNDOttIiruleeVr6f56OrMc+MydEnTcXwtkxNVs=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10=
github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77 h1:7GoSOOW2jpsfkntVKaS2rAr1TJqfcxotyaUcuxoZSzg=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE=
github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg=
github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU=
github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k=
github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w=
github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis=
github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74=
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4=
github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4=
github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac=
github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.3.0 h1:miYCvYqFXtl/J9FIy8eNpBfYthAEFg+Ys0XyUVEcDsc=
github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.10.0 h1:/o0BDeWzLWXNZ+4q5gXltUvaMpJqckTa+jTNoB+z4cg=
github.com/prometheus/client_golang v1.10.0/go.mod h1:WJM3cc3yu7XKBKa/I8WeZm+V3eltZnBwfENSU7mdogU=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.1.0 h1:ElTg5tNp4DqfV7UQjDqv2+RJlNzsDtvNAWccbItceIE=
github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.7.0 h1:L+1lyG48J1zAQXA3RBX/nG/B3gjlHq0zTt2tlbJLyCY=
github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.18.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s=
github.com/prometheus/common v0.23.0 h1:GXWvPYuTUenIa+BhOq/x+L/QZzCqASkVRny5KTlPDGM=
github.com/prometheus/common v0.23.0/go.mod h1:H6QK/N6XVT42whUeIdI3dp36w49c+/iMDk7UAI2qm7Q=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8 h1:+fpWZdT24pJBiqJdAwYBjPSk+5YmQzYNPYzQsdzLkt8=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw=
github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw=
github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d h1:g9qWBGx4puODJTMVyoPrpoxPFgVGd+z1DZwjfRu4d0I=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478 h1:l5EDrHhldLYb3ZRHDUhXF7Om7MvYXnkV9/iQNo1lX6g=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210510120150-4163338589ed h1:p9UgmWI9wKpfYmgaV/IZKGdXc5qEK45tDwwwDyjS26I=
golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f h1:68K/z8GLUxV76xGSqwTWw2gyk/jwn79LUL43rES2g8o=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da h1:b3NXsE2LusjYGGjL5bxEVZZORm/YEFFrWFjR8eFrw/c=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f h1:Yv4xsIx7HZOoyUGSJ2ksDyWE2qIBXROsZKt2ny3hCGM=
google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.27.1 h1:zvIju4sqAGvwKspUQOhwnpcqSbzi7/H6QomNNjTL4sk=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
|
||||
google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/grpc v1.37.1 h1:ARnQJNWxGyYJpdf/JXscNlQr/uv607ZPU9Z7ogHi+iI=
|
||||
google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
|
||||
google.golang.org/grpc/cmd/protoc-gen-go-grpc v0.0.0-20200910201057-6591123024b3/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
|
||||
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
|
||||
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
|
||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.5 h1:ymVxjfMaHvXD8RqPRmzHHsB3VvucivSkIAvJFDI5O3c=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
|
||||
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
|
||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
|
||||
sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU=
|
||||
|
||||
16
package.json
@@ -29,11 +29,12 @@
    "@babel/core": "7.7.7",
    "@babel/preset-env": "7.7.7",
    "@babel/preset-react": "7.6.3",
    "@emotion/core": "10.0.27",
    "@grafana/data": "^7.3.2",
    "@grafana/runtime": "^7.3.2",
    "@grafana/toolkit": "^7.3.2",
    "@grafana/ui": "7.0.1",
    "@emotion/css": "11.1.3",
    "@emotion/react": "11.1.5",
    "@grafana/data": "^8.0.6",
    "@grafana/runtime": "^8.0.6",
    "@grafana/toolkit": "^8.0.6",
    "@grafana/ui": "^8.0.6",
    "@popperjs/core": "2.4.0",
    "@types/classnames": "2.2.9",
    "@types/grafana": "github:CorpGlory/types-grafana",
@@ -81,14 +82,15 @@
    "react-test-renderer": "^16.7.0",
    "react-transition-group": "4.3.0",
    "rst2html": "github:thoward/rst2html#990cb89",
    "rxjs": "6.6.3",
    "sass-loader": "8.0.2",
    "semver": "^7.3.2",
    "style-loader": "1.1.3",
    "tether-drop": "^1.4.2",
    "ts-jest": "24.1.0",
    "ts-loader": "4.4.1",
    "tslint": "5.20.1",
    "typescript": "3.9.2",
    "tslint": "^6.1.3",
    "typescript": "^4.1.2",
    "webpack": "4.41.5",
    "webpack-cli": "3.3.10"
  },
@@ -8,13 +8,14 @@ import (
    "time"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/gtime"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/httpclient"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
    "github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
    "github.com/grafana/grafana-plugin-sdk-go/backend/log"
    "github.com/grafana/grafana-plugin-sdk-go/data"
)

var (
@@ -30,11 +31,10 @@ type ZabbixDatasource struct {
// ZabbixDatasourceInstance stores state about a specific datasource
// and provides methods to make requests to the Zabbix API
type ZabbixDatasourceInstance struct {
    zabbixAPI  *zabbixapi.ZabbixAPI
    dsInfo     *backend.DataSourceInstanceSettings
    Settings   *ZabbixDatasourceSettings
    queryCache *DatasourceCache
    logger     log.Logger
    zabbix   *zabbix.Zabbix
    dsInfo   *backend.DataSourceInstanceSettings
    Settings *ZabbixDatasourceSettings
    logger   log.Logger
}

func NewZabbixDatasource() *ZabbixDatasource {
@@ -56,18 +56,29 @@ func newZabbixDatasourceInstance(settings backend.DataSourceInstanceSettings) (i
        return nil, err
    }

    zabbixAPI, err := zabbixapi.New(&settings, zabbixSettings.Timeout)
    client, err := httpclient.New(&settings, zabbixSettings.Timeout)
    if err != nil {
        logger.Error("Error initializing HTTP client", "error", err)
        return nil, err
    }

    zabbixAPI, err := zabbixapi.New(settings.URL, client)
    if err != nil {
        logger.Error("Error initializing Zabbix API", "error", err)
        return nil, err
    }

    zabbixClient, err := zabbix.New(&settings, zabbixAPI)
    if err != nil {
        logger.Error("Error initializing Zabbix client", "error", err)
        return nil, err
    }

    return &ZabbixDatasourceInstance{
        dsInfo:     &settings,
        zabbixAPI:  zabbixAPI,
        Settings:   zabbixSettings,
        queryCache: NewDatasourceCache(zabbixSettings.CacheTTL, 10*time.Minute),
        logger:     logger,
        dsInfo:   &settings,
        zabbix:   zabbixClient,
        Settings: zabbixSettings,
        logger:   logger,
    }, nil
}
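For context, the instance construction is now a three-layer chain: an HTTP client, the low-level zabbixapi client, and the high-level zabbix.Zabbix client that owns querying (and, with the cache removal further below, caching). A minimal sketch of exercising the chain end to end; the settings literal and URL are illustrative only:

    // Sketch only: exercises the constructor chain shown above.
    settings := backend.DataSourceInstanceSettings{
        URL:      "http://zabbix.example.com/api_jsonrpc.php", // illustrative
        JSONData: []byte(`{}`),
    }
    instance, err := newZabbixDatasourceInstance(settings)
    if err != nil {
        log.DefaultLogger.Error("instance init failed", "error", err)
    }
    _ = instance // a *ZabbixDatasourceInstance wrapping the httpclient -> zabbixapi -> zabbix layers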
@@ -97,6 +108,7 @@ func (ds *ZabbixDatasource) CheckHealth(ctx context.Context, req *backend.CheckH
}

func (ds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
    ds.logger.Debug("QueryData()")
    qdr := backend.NewQueryDataResponse()

    zabbixDS, err := ds.getDSInstance(req.PluginContext)
@@ -110,17 +122,22 @@ func (ds *ZabbixDatasource) QueryData(ctx context.Context, req *backend.QueryDat
        ds.logger.Debug("DS query", "query", q)
        if err != nil {
            res.Error = err
        } else if len(query.Functions) > 0 {
            res.Error = ErrFunctionsNotSupported
        } else if query.Mode != 0 {
            res.Error = ErrNonMetricQueryNotSupported
        } else {
            frame, err := zabbixDS.queryNumericItems(ctx, &query)
        } else if query.QueryType == MODE_METRICS {
            frames, err := zabbixDS.queryNumericItems(ctx, &query)
            if err != nil {
                res.Error = err
            } else {
                res.Frames = []*data.Frame{frame}
                res.Frames = append(res.Frames, frames...)
            }
        } else if query.QueryType == MODE_ITEMID {
            frames, err := zabbixDS.queryItemIdData(ctx, &query)
            if err != nil {
                res.Error = err
            } else {
                res.Frames = append(res.Frames, frames...)
            }
        } else {
            res.Error = ErrNonMetricQueryNotSupported
        }
        qdr.Responses[q.RefID] = res
    }
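The diff view above has lost its +/- markers, so removed and added branches sit interleaved. Reconstructed from the hunk, the post-change routing reads roughly as follows (a best-effort reconstruction, not verbatim):

    if err != nil {
        res.Error = err
    } else if query.QueryType == MODE_METRICS {
        // Metric queries: resolved by group/host/application/item filters.
        frames, err := zabbixDS.queryNumericItems(ctx, &query)
        if err != nil {
            res.Error = err
        } else {
            res.Frames = append(res.Frames, frames...)
        }
    } else if query.QueryType == MODE_ITEMID {
        // Item ID queries: resolved by an explicit comma-separated itemid list.
        frames, err := zabbixDS.queryItemIdData(ctx, &query)
        if err != nil {
            res.Error = err
        } else {
            res.Frames = append(res.Frames, frames...)
        }
    } else {
        res.Error = ErrNonMetricQueryNotSupported
    }

Note that functions no longer short-circuit with ErrFunctionsNotSupported; they are now processed on the backend (see pkg/datasource/functions.go below).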
@@ -180,11 +197,13 @@ func readZabbixSettings(dsInstanceSettings *backend.DataSourceInstanceSettings)
    }

    zabbixSettings := &ZabbixDatasourceSettings{
        Trends:      zabbixSettingsDTO.Trends,
        TrendsFrom:  trendsFrom,
        TrendsRange: trendsRange,
        CacheTTL:    cacheTTL,
        Timeout:     time.Duration(timeout) * time.Second,
        Trends:                  zabbixSettingsDTO.Trends,
        TrendsFrom:              trendsFrom,
        TrendsRange:             trendsRange,
        CacheTTL:                cacheTTL,
        Timeout:                 time.Duration(timeout) * time.Second,
        DisableDataAlignment:    zabbixSettingsDTO.DisableDataAlignment,
        DisableReadOnlyUsersAck: zabbixSettingsDTO.DisableReadOnlyUsersAck,
    }

    return zabbixSettings, nil
@@ -1,40 +0,0 @@
package datasource

import (
    "crypto/sha1"
    "encoding/hex"
    "time"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
)

// DatasourceCache is a cache for a datasource instance.
type DatasourceCache struct {
    cache *cache.Cache
}

// NewDatasourceCache creates a DatasourceCache with expiration (ttl) time and cleanupInterval.
func NewDatasourceCache(ttl time.Duration, cleanupInterval time.Duration) *DatasourceCache {
    return &DatasourceCache{
        cache.NewCache(ttl, cleanupInterval),
    }
}

// GetAPIRequest gets a request response from the cache
func (c *DatasourceCache) GetAPIRequest(request *ZabbixAPIRequest) (interface{}, bool) {
    requestHash := HashString(request.String())
    return c.cache.Get(requestHash)
}

// SetAPIRequest writes a request response to the cache
func (c *DatasourceCache) SetAPIRequest(request *ZabbixAPIRequest, response interface{}) {
    requestHash := HashString(request.String())
    c.cache.Set(requestHash, response)
}

// HashString converts the given text string to a hash string
func HashString(text string) string {
    hash := sha1.New()
    hash.Write([]byte(text))
    return hex.EncodeToString(hash.Sum(nil))
}
@@ -61,7 +61,7 @@ func TestZabbixBackend_getCachedDatasource(t *testing.T) {
            got, _ := ds.getDSInstance(tt.pluginContext)

            // Only checking the URL, it being the easiest value to guarantee equality for
            assert.Equal(t, tt.want.zabbixAPI.GetUrl().String(), got.zabbixAPI.GetUrl().String())
            assert.Equal(t, tt.want.zabbix.GetAPI().GetUrl().String(), got.zabbix.GetAPI().GetUrl().String())
        })
    }
}
446
pkg/datasource/functions.go
Normal file
@@ -0,0 +1,446 @@
package datasource

import (
    "fmt"
    "strconv"
    "strings"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/gtime"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/timeseries"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
)

const RANGE_VARIABLE_VALUE = "range_series"

var (
    errFunctionNotSupported = func(name string) error {
        return fmt.Errorf("function not supported: %s", name)
    }
    errParsingFunctionParam = func(err error) error {
        return fmt.Errorf("failed to parse function param: %s", err)
    }
)

func MustString(p QueryFunctionParam) (string, error) {
    if pStr, ok := p.(string); ok {
        return pStr, nil
    }
    return "", fmt.Errorf("failed to convert value to string: %v", p)
}

func MustFloat64(p QueryFunctionParam) (float64, error) {
    if pFloat, ok := p.(float64); ok {
        return pFloat, nil
    } else if pStr, ok := p.(string); ok {
        if pFloat, err := strconv.ParseFloat(pStr, 64); err == nil {
            return pFloat, nil
        }
    }
    return 0, fmt.Errorf("failed to convert value to float: %v", p)
}
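Function parameters arrive as untyped JSON values (QueryFunctionParam is an alias for interface{}, defined in the models below), so these two helpers normalize them. For illustration, following the conversions above:

    n, _ := MustFloat64("1.5")   // string form parses -> 1.5
    n2, _ := MustFloat64(10.0)   // native float64 passes through -> 10
    agg, _ := MustString("avg")  // -> "avg"
    _, err := MustFloat64(true)  // -> error: failed to convert value to float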
type DataProcessingFunc = func(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error)

type AggDataProcessingFunc = func(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error)

type PreProcessingFunc = func(query *QueryModel, items []*zabbix.Item, params ...interface{}) error

var seriesFuncMap map[string]DataProcessingFunc

var aggFuncMap map[string]AggDataProcessingFunc

var filterFuncMap map[string]AggDataProcessingFunc

var timeFuncMap map[string]PreProcessingFunc

var skippedFuncMap map[string]bool

func init() {
    seriesFuncMap = map[string]DataProcessingFunc{
        "groupBy":                  applyGroupBy,
        "scale":                    applyScale,
        "offset":                   applyOffset,
        "delta":                    applyDelta,
        "rate":                     applyRate,
        "movingAverage":            applyMovingAverage,
        "exponentialMovingAverage": applyExponentialMovingAverage,
        "removeAboveValue":         applyRemoveAboveValue,
        "removeBelowValue":         applyRemoveBelowValue,
        "transformNull":            applyTransformNull,
        "percentile":               applyPercentile,
        "timeShift":                applyTimeShiftPost,
    }

    aggFuncMap = map[string]AggDataProcessingFunc{
        "aggregateBy":   applyAggregateBy,
        "sumSeries":     applySumSeries,
        "percentileAgg": applyPercentileAgg,
    }

    filterFuncMap = map[string]AggDataProcessingFunc{
        "top":        applyTop,
        "bottom":     applyBottom,
        "sortSeries": applySortSeries,
    }

    timeFuncMap = map[string]PreProcessingFunc{
        "timeShift": applyTimeShiftPre,
    }

    // Functions not processed here, or processed on the frontend; skip them
    skippedFuncMap = map[string]bool{
        "setAlias":        true,
        "replaceAlias":    true,
        "setAliasByRegex": true,
        "trendValue":      true,
        "consolidateBy":   true,
    }
}
func applyFunctions(series []*timeseries.TimeSeriesData, functions []QueryFunction) ([]*timeseries.TimeSeriesData, error) {
    for _, f := range functions {
        if applyFunc, ok := seriesFuncMap[f.Def.Name]; ok {
            for _, s := range series {
                result, err := applyFunc(s.TS, f.Params...)
                if err != nil {
                    return nil, err
                }
                s.TS = result
            }
        } else if applyAggFunc, ok := aggFuncMap[f.Def.Name]; ok {
            result, err := applyAggFunc(series, f.Params...)
            if err != nil {
                return nil, err
            }
            series = result
        } else if applyFilterFunc, ok := filterFuncMap[f.Def.Name]; ok {
            result, err := applyFilterFunc(series, f.Params...)
            if err != nil {
                return nil, err
            }
            series = result
        } else if _, ok := skippedFuncMap[f.Def.Name]; ok {
            continue
        } else {
            err := errFunctionNotSupported(f.Def.Name)
            return series, err
        }
    }
    return series, nil
}
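A usage sketch of the dispatcher above: per-series functions run on each series in place, while aggregation and filter functions replace the whole set. The series slice is assumed to be fetched already; the parameter values are illustrative:

    // Apply scale(10) followed by sumSeries() to an already-fetched series set.
    fns := []QueryFunction{
        {Def: QueryFunctionDef{Name: "scale"}, Params: []QueryFunctionParam{"10"}},
        {Def: QueryFunctionDef{Name: "sumSeries"}, Params: []QueryFunctionParam{}},
    }
    out, err := applyFunctions(series, fns)
    if err != nil {
        return nil, err
    }
    // out now holds a single summed series scaled by 10.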
// applyFunctionsPre applies functions that require pre-processing, like timeShift() (it needs to change the original time range)
func applyFunctionsPre(query *QueryModel, items []*zabbix.Item) error {
    for _, f := range query.Functions {
        if applyFunc, ok := timeFuncMap[f.Def.Name]; ok {
            err := applyFunc(query, items, f.Params...)
            if err != nil {
                return err
            }
        }
    }

    return nil
}
func applyGroupBy(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    pInterval, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    pAgg, err := MustString(params[1])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    aggFunc := getAggFunc(pAgg)
    if pInterval == RANGE_VARIABLE_VALUE {
        s := series.GroupByRange(aggFunc)
        return s, nil
    }

    interval, err := gtime.ParseInterval(pInterval)
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    if interval == 0 {
        return series, nil
    }

    return series.GroupBy(interval, aggFunc), nil
}
func applyPercentile(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    pInterval, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    percentile, err := MustFloat64(params[1])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    aggFunc := timeseries.AggPercentile(percentile)
    if pInterval == RANGE_VARIABLE_VALUE {
        s := series.GroupByRange(aggFunc)
        return s, nil
    }

    interval, err := gtime.ParseInterval(pInterval)
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    if interval == 0 {
        return series, nil
    }

    s := series.GroupBy(interval, aggFunc)
    return s, nil
}
func applyScale(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    pFactor, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    factor, err := strconv.ParseFloat(pFactor, 64)
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    transformFunc := timeseries.TransformScale(factor)
    return series.Transform(transformFunc), nil
}

func applyOffset(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    offset, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    transformFunc := timeseries.TransformOffset(offset)
    return series.Transform(transformFunc), nil
}

func applyDelta(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    return series.Delta(), nil
}

func applyRate(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    return series.Rate(), nil
}

func applyRemoveAboveValue(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    threshold, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    transformFunc := timeseries.TransformRemoveAboveValue(threshold)
    return series.Transform(transformFunc), nil
}

func applyRemoveBelowValue(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    threshold, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    transformFunc := timeseries.TransformRemoveBelowValue(threshold)
    return series.Transform(transformFunc), nil
}

func applyTransformNull(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    nullValue, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    transformFunc := timeseries.TransformNull(nullValue)
    return series.Transform(transformFunc), nil
}

func applyMovingAverage(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    nFloat, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    n := int(nFloat)

    return series.SimpleMovingAverage(n), nil
}

func applyExponentialMovingAverage(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    n, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    return series.ExponentialMovingAverage(n), nil
}
func applyAggregateBy(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error) {
    pInterval, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    pAgg, err := MustString(params[1])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    interval, err := gtime.ParseInterval(pInterval)
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    if interval == 0 {
        return series, nil
    }

    aggFunc := getAggFunc(pAgg)
    aggregatedSeries := timeseries.AggregateBy(series, interval, aggFunc)
    aggregatedSeries.Meta.Name = fmt.Sprintf("aggregateBy(%s, %s)", pInterval, pAgg)

    return []*timeseries.TimeSeriesData{aggregatedSeries}, nil
}

func applySumSeries(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error) {
    sum := timeseries.SumSeries(series)
    sum.Meta.Name = "sumSeries()"
    return []*timeseries.TimeSeriesData{sum}, nil
}

func applyPercentileAgg(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error) {
    pInterval, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    percentile, err := MustFloat64(params[1])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    interval, err := gtime.ParseInterval(pInterval)
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    if interval == 0 {
        return series, nil
    }

    aggFunc := timeseries.AggPercentile(percentile)
    aggregatedSeries := timeseries.AggregateBy(series, interval, aggFunc)
    aggregatedSeries.Meta.Name = fmt.Sprintf("percentileAgg(%s, %v)", pInterval, percentile)

    return []*timeseries.TimeSeriesData{aggregatedSeries}, nil
}
func applyTop(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error) {
    n, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    pAgg, err := MustString(params[1])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    aggFunc := getAggFunc(pAgg)
    filteredSeries := timeseries.Filter(series, int(n), "top", aggFunc)
    return filteredSeries, nil
}

func applyBottom(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error) {
    n, err := MustFloat64(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    pAgg, err := MustString(params[1])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    aggFunc := getAggFunc(pAgg)
    filteredSeries := timeseries.Filter(series, int(n), "bottom", aggFunc)
    return filteredSeries, nil
}

func applySortSeries(series []*timeseries.TimeSeriesData, params ...interface{}) ([]*timeseries.TimeSeriesData, error) {
    order, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }

    aggFunc := timeseries.AggAvg
    sorted := timeseries.SortBy(series, order, aggFunc)
    return sorted, nil
}
func applyTimeShiftPre(query *QueryModel, items []*zabbix.Item, params ...interface{}) error {
    pInterval, err := MustString(params[0])
    if err != nil {
        return errParsingFunctionParam(err)
    }
    shiftForward := false
    pInterval = strings.TrimPrefix(pInterval, "-")
    if strings.HasPrefix(pInterval, "+") {
        pInterval = strings.TrimPrefix(pInterval, "+")
        shiftForward = true
    }

    interval, err := gtime.ParseInterval(pInterval)
    if err != nil {
        return errParsingFunctionParam(err)
    }
    if interval == 0 {
        return fmt.Errorf("interval should be non-null value")
    }

    if shiftForward {
        query.TimeRange.From = query.TimeRange.From.Add(interval)
        query.TimeRange.To = query.TimeRange.To.Add(interval)
    } else {
        query.TimeRange.From = query.TimeRange.From.Add(-interval)
        query.TimeRange.To = query.TimeRange.To.Add(-interval)
    }

    return nil
}

func applyTimeShiftPost(series timeseries.TimeSeries, params ...interface{}) (timeseries.TimeSeries, error) {
    pInterval, err := MustString(params[0])
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    shiftForward := false
    pInterval = strings.TrimPrefix(pInterval, "-")
    if strings.HasPrefix(pInterval, "+") {
        pInterval = strings.TrimPrefix(pInterval, "+")
        shiftForward = true
    }

    interval, err := gtime.ParseInterval(pInterval)
    if err != nil {
        return nil, errParsingFunctionParam(err)
    }
    if interval == 0 {
        return series, nil
    }
    if shiftForward {
        interval = -interval
    }

    transformFunc := timeseries.TransformShiftTime(interval)
    return series.Transform(transformFunc), nil
}
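Worked through for timeShift("-1h"): the pre step trims the leading "-", leaves shiftForward false, and moves both ends of the requested range one hour back, so the datasource fetches hour-old data. Once the response arrives, the post step (shiftForward still false, interval kept positive) applies TransformShiftTime with the positive interval, which, by this reading, moves every timestamp one hour forward again so the shifted series lines up with the current time range on the graph. A leading "+" inverts both directions.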
func getAggFunc(agg string) timeseries.AggFunc {
    switch agg {
    case "avg":
        return timeseries.AggAvg
    case "max":
        return timeseries.AggMax
    case "min":
        return timeseries.AggMin
    case "sum":
        return timeseries.AggSum
    case "median":
        return timeseries.AggMedian
    case "count":
        return timeseries.AggCount
    case "first":
        return timeseries.AggFirst
    case "last":
        return timeseries.AggLast
    default:
        return timeseries.AggAvg
    }
}

func sortSeriesPoints(series []*timeseries.TimeSeriesData) {
    for _, s := range series {
        s.TS.Sort()
    }
}
@@ -3,9 +3,22 @@ package datasource
import (
    "encoding/json"
    "fmt"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/timeseries"
    "strconv"
    "time"

    "github.com/bitly/go-simplejson"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/backend/log"
)

const (
    MODE_METRICS   = "0"
    MODE_ITSERVICE = "1"
    MODE_TEXT      = "2"
    MODE_ITEMID    = "3"
    MODE_TRIGGERS  = "4"
    MODE_PROBLEMS  = "5"
)
// ZabbixDatasourceSettingsDTO model
@@ -16,6 +29,7 @@ type ZabbixDatasourceSettingsDTO struct {
    CacheTTL string `json:"cacheTTL"`
    Timeout  string `json:"timeout"`

    DisableDataAlignment    bool `json:"disableDataAlignment"`
    DisableReadOnlyUsersAck bool `json:"disableReadOnlyUsersAck"`
}

@@ -27,43 +41,53 @@ type ZabbixDatasourceSettings struct {
    CacheTTL time.Duration
    Timeout  time.Duration

    DisableDataAlignment    bool `json:"disableDataAlignment"`
    DisableReadOnlyUsersAck bool `json:"disableReadOnlyUsersAck"`
}

type DBConnectionPostProcessingRequest struct {
    Query     QueryModel                     `json:"query"`
    TimeRange TimeRangePostProcessingRequest `json:"timeRange"`
    Series    []*timeseries.TimeSeriesData   `json:"series"`
}

type TimeRangePostProcessingRequest struct {
    From int64
    To   int64
}

type ZabbixAPIResourceRequest struct {
    DatasourceId int64                  `json:"datasourceId"`
    Method       string                 `json:"method"`
    Params       map[string]interface{} `json:"params,omitempty"`
}

type ZabbixAPIRequest struct {
    Method string          `json:"method"`
    Params ZabbixAPIParams `json:"params,omitempty"`
}

func (r *ZabbixAPIRequest) String() string {
    jsonRequest, _ := json.Marshal(r.Params)
    return r.Method + string(jsonRequest)
}
type ZabbixAPIParams = map[string]interface{}

type ZabbixAPIResourceResponse struct {
    Result interface{} `json:"result,omitempty"`
}

// QueryModel model
type QueryModel struct {
    Mode        int64           `json:"mode"`
    Group       QueryFilter     `json:"group"`
    Host        QueryFilter     `json:"host"`
    Application QueryFilter     `json:"application"`
    Item        QueryFilter     `json:"item"`
    Functions   []QueryFunction `json:"functions,omitempty"`
    Options     QueryOptions    `json:"options"`
    // Deprecated `mode` field, use QueryType instead
    Mode      int64  `json:"mode"`
    QueryType string `json:"queryType"`

    Group       QueryFilter `json:"group"`
    Host        QueryFilter `json:"host"`
    Application QueryFilter `json:"application"`
    Item        QueryFilter `json:"item"`

    // Item ID mode
    ItemIDs string `json:"itemids,omitempty"`

    Functions []QueryFunction `json:"functions,omitempty"`
    Options   QueryOptions    `json:"options"`

    // Direct from the gRPC interfaces
    TimeRange backend.TimeRange `json:"-"`
    RefID         string            `json:"-"`
    TimeRange     backend.TimeRange `json:"-"`
    MaxDataPoints int64             `json:"-"`
    Interval      time.Duration     `json:"-"`
}
// QueryOptions model
@@ -73,29 +97,65 @@ type QueryFilter struct {

// QueryOptions model
type QueryOptions struct {
    ShowDisabledItems bool `json:"showDisabledItems"`
    ShowDisabledItems    bool `json:"showDisabledItems"`
    DisableDataAlignment bool `json:"disableDataAlignment"`
}

// QueryFunction model
type QueryFunction struct {
    Def    QueryFunctionDef `json:"def"`
    Params []string         `json:"params"`
    Text   string           `json:"text"`
    Def    QueryFunctionDef     `json:"def"`
    Params []QueryFunctionParam `json:"params"`
    Text   string               `json:"text"`
}

// QueryFunctionDef model
type QueryFunctionDef struct {
    Name     string `json:"name"`
    Category string `json:"category"`
    Name          string                  `json:"name"`
    Category      string                  `json:"category"`
    Params        []QueryFunctionParamDef `json:"params"`
    DefaultParams []QueryFunctionParam    `json:"defaultParams"`
}

type QueryFunctionParamDef struct {
    Name string `json:"name"`
    Type string `json:"type"`
}

type QueryFunctionParam = interface{}

type ScopedVar struct {
    Text  string `json:"text"`
    Value string `json:"value"`
}
// ReadQuery will read and validate a query model from the backend.DataQuery
func ReadQuery(query backend.DataQuery) (QueryModel, error) {
    model := QueryModel{}
    model := QueryModel{
        RefID:         query.RefID,
        QueryType:     query.QueryType,
        TimeRange:     query.TimeRange,
        MaxDataPoints: query.MaxDataPoints,
        Interval:      query.Interval,
    }
    if err := json.Unmarshal(query.JSON, &model); err != nil {
        return model, fmt.Errorf("could not read query: %w", err)
    }

    model.TimeRange = query.TimeRange
    if model.QueryType == "" {
        queryJSON, err := simplejson.NewJson(query.JSON)
        if err != nil {
            return model, fmt.Errorf("could not read query JSON: %w", err)
        }

        queryType, err := queryJSON.Get("queryType").Int64()
        if err != nil {
            log.DefaultLogger.Warn("could not read query type", "error", err)
            log.DefaultLogger.Debug("setting query type to default value")
            model.QueryType = "0"
        } else {
            model.QueryType = strconv.FormatInt(queryType, 10)
        }
    }

    return model, nil
}
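A usage sketch of ReadQuery; the JSON payload is illustrative:

    q := backend.DataQuery{
        RefID:     "A",
        QueryType: "0", // MODE_METRICS
        JSON:      []byte(`{"mode": 0, "group": {"filter": "/.*/"}, "host": {"filter": "backend01"}}`),
    }
    model, err := ReadQuery(q)
    if err != nil {
        return err
    }
    // model.QueryType == "0"; legacy payloads with an empty queryType fall back
    // to the numeric "queryType" probe above and default to "0" when unreadable.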
@@ -4,7 +4,9 @@ import (
    "encoding/json"
    "io/ioutil"
    "net/http"
    "time"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
    "github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
)

@@ -47,7 +49,7 @@ func (ds *ZabbixDatasource) ZabbixAPIHandler(rw http.ResponseWriter, req *http.R
        return
    }

    apiReq := &ZabbixAPIRequest{Method: reqData.Method, Params: reqData.Params}
    apiReq := &zabbix.ZabbixAPIRequest{Method: reqData.Method, Params: reqData.Params}

    result, err := dsInstance.ZabbixAPIQuery(req.Context(), apiReq)
    if err != nil {
@@ -59,6 +61,50 @@ func (ds *ZabbixDatasource) ZabbixAPIHandler(rw http.ResponseWriter, req *http.R
    writeResponse(rw, result)
}
func (ds *ZabbixDatasource) DBConnectionPostProcessingHandler(rw http.ResponseWriter, req *http.Request) {
    if req.Method != http.MethodPost {
        return
    }

    body, err := ioutil.ReadAll(req.Body)
    defer req.Body.Close()
    if err != nil || len(body) == 0 {
        writeError(rw, http.StatusBadRequest, err)
        return
    }

    var reqData DBConnectionPostProcessingRequest
    err = json.Unmarshal(body, &reqData)
    if err != nil {
        ds.logger.Error("Cannot unmarshal request", "error", err.Error())
        writeError(rw, http.StatusInternalServerError, err)
        return
    }

    pluginCxt := httpadapter.PluginConfigFromContext(req.Context())
    dsInstance, err := ds.getDSInstance(pluginCxt)
    if err != nil {
        ds.logger.Error("Error loading datasource", "error", err)
        writeError(rw, http.StatusInternalServerError, err)
        return
    }

    reqData.Query.TimeRange.From = time.Unix(reqData.TimeRange.From, 0)
    reqData.Query.TimeRange.To = time.Unix(reqData.TimeRange.To, 0)

    frames, err := dsInstance.applyDataProcessing(req.Context(), &reqData.Query, reqData.Series)
    if err != nil {
        writeError(rw, http.StatusInternalServerError, err)
        return
    }

    resultJson, err := json.Marshal(frames)
    if err != nil {
        writeError(rw, http.StatusInternalServerError, err)
        return
    }

    rw.Header().Add("Content-Type", "application/json")
    rw.WriteHeader(http.StatusOK)
    rw.Write(resultJson)
}
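An illustrative payload for the handler above, as the frontend's direct-DB-connection path would send it. The resource route the handler is mounted on is registered elsewhere and is not shown in this diff, so only the body construction is sketched:

    // series: []*timeseries.TimeSeriesData fetched over a direct DB connection.
    reqData := DBConnectionPostProcessingRequest{
        Query:     QueryModel{QueryType: MODE_METRICS},
        TimeRange: TimeRangePostProcessingRequest{From: 1609455600, To: 1609459200}, // Unix seconds
        Series:    series,
    }
    body, err := json.Marshal(reqData)
    if err != nil {
        return err
    }
    // POST body to the plugin resource endpoint; the response is a JSON array of data frames.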
func writeResponse(rw http.ResponseWriter, result *ZabbixAPIResourceResponse) {
    resultJson, err := json.Marshal(*result)
    if err != nil {
@@ -2,13 +2,143 @@ package datasource

import (
    "fmt"
    "regexp"
    "strconv"
    "time"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/gtime"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/timeseries"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/data"
)

func convertHistory(history History, items Items) *data.Frame {
func convertHistoryToTimeSeries(history zabbix.History, items []*zabbix.Item) []*timeseries.TimeSeriesData {
    seriesMap := make(map[string]*timeseries.TimeSeriesData, len(items))

    itemsMap := make(map[string]*zabbix.Item, len(items))
    for _, item := range items {
        itemsMap[item.ID] = item
    }

    for _, point := range history {
        pointItem := itemsMap[point.ItemID]
        if seriesMap[point.ItemID] == nil {
            seriesMap[point.ItemID] = timeseries.NewTimeSeriesData()
        }
        pointSeries := seriesMap[point.ItemID]
        if pointSeries.Meta.Item == nil {
            itemName := pointItem.ExpandItemName()
            pointSeries.Meta.Item = pointItem
            pointSeries.Meta.Item.Name = itemName
            pointSeries.Meta.Name = itemName
            if len(pointItem.Hosts) > 0 {
                pointSeries.Meta.Name = fmt.Sprintf("%s: %s", pointItem.Hosts[0].Name, itemName)
            }
            pointSeries.Meta.Interval = parseItemUpdateInterval(pointItem.Delay)
        }

        value := point.Value
        pointSeries.Add(timeseries.TimePoint{
            Time:  time.Unix(point.Clock, point.NS),
            Value: &value,
        })
    }

    series := make([]*timeseries.TimeSeriesData, 0)
    for _, tsd := range seriesMap {
        series = append(series, tsd)
    }

    timeseries.SortByItem(series)
    return series
}
func convertTimeSeriesToDataFrame(series []*timeseries.TimeSeriesData) *data.Frame {
    timeFileld := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
    timeFileld.Name = "time"
    frame := data.NewFrame("History", timeFileld)

    if len(series) == 0 {
        return frame
    }

    for _, s := range series {
        field := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
        field.Name = s.Meta.Name

        frame.Fields = append(frame.Fields, field)
    }

    for i, s := range series {
        currentFieldIndex := i + 1
        for _, point := range s.TS {
            timeFileld.Append(point.Time)
            for fieldIndex, field := range frame.Fields {
                if fieldIndex == currentFieldIndex {
                    field.Append(point.Value)
                } else if fieldIndex > 0 {
                    field.Append(nil)
                }
            }
        }
    }

    wideFrame, err := data.LongToWide(frame, &data.FillMissing{Mode: data.FillModeNull})
    if err != nil {
        backend.Logger.Debug("Error converting data frame to the wide format", "error", err)
        return frame
    }
    return wideFrame
}
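Reading the loop above: it first builds a long-form frame with one row per point and nil placeholders in every other series' column, so two series sharing timestamp t1 produce rows (t1, v1, nil) and (t1, nil, v2). data.LongToWide then merges rows with equal timestamps, so each timestamp appears exactly once, e.g. (t1, v1, v2), and genuinely missing cells stay null via FillModeNull. (That is how this code reads; the exact merge semantics live in the plugin SDK.)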
func convertTimeSeriesToDataFrames(series []*timeseries.TimeSeriesData) []*data.Frame {
    frames := make([]*data.Frame, 0)

    for _, s := range series {
        frames = append(frames, seriesToDataFrame(s))
    }

    return frames
}
func seriesToDataFrame(series *timeseries.TimeSeriesData) *data.Frame {
    timeFileld := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
    timeFileld.Name = data.TimeSeriesTimeFieldName

    seriesName := series.Meta.Name
    valueField := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
    valueField.Name = data.TimeSeriesValueFieldName

    item := series.Meta.Item
    scopedVars := map[string]ScopedVar{
        "__zbx_item":          {Value: item.Name},
        "__zbx_item_name":     {Value: item.Name},
        "__zbx_item_key":      {Value: item.Key},
        "__zbx_item_interval": {Value: item.Delay},
        // Default; overwritten below when host info is present. The original
        // assigned item.Delay here, an apparent copy-paste slip.
        "__zbx_host": {Value: ""},
    }
    if len(item.Hosts) > 0 {
        scopedVars["__zbx_host"] = ScopedVar{Value: item.Hosts[0].Name}
        scopedVars["__zbx_host_name"] = ScopedVar{Value: item.Hosts[0].Name}
    }
    valueField.Config = &data.FieldConfig{
        Custom: map[string]interface{}{
            "scopedVars": scopedVars,
        },
    }

    frame := data.NewFrame(seriesName, timeFileld, valueField)

    for _, point := range series.TS {
        timeFileld.Append(point.Time)
        valueField.Append(point.Value)
    }

    return frame
}
func convertHistoryToDataFrame(history zabbix.History, items []*zabbix.Item) *data.Frame {
    timeFileld := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
    timeFileld.Name = "time"
    frame := data.NewFrame("History", timeFileld)
@@ -16,9 +146,9 @@ func convertHistory(history History, items Items) *data.Frame {
    for _, item := range items {
        field := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
        if len(item.Hosts) > 0 {
            field.Name = fmt.Sprintf("%s: %s", item.Hosts[0].Name, item.ExpandItem())
            field.Name = fmt.Sprintf("%s: %s", item.Hosts[0].Name, item.ExpandItemName())
        } else {
            field.Name = item.ExpandItem()
            field.Name = item.ExpandItemName()
        }
        frame.Fields = append(frame.Fields, field)
    }
@@ -47,3 +177,74 @@ func convertHistory(history History, items Items) *data.Frame {
    }
    return wideFrame
}
func convertTrendToHistory(trend zabbix.Trend, valueType string) (zabbix.History, error) {
    history := make([]zabbix.HistoryPoint, 0)
    for _, point := range trend {
        value, err := getTrendPointValue(point, valueType)
        if err != nil {
            return nil, err
        }

        history = append(history, zabbix.HistoryPoint{
            ItemID: point.ItemID,
            Clock:  point.Clock,
            Value:  value,
        })
    }

    return history, nil
}
func getTrendPointValue(point zabbix.TrendPoint, valueType string) (float64, error) {
    if valueType == "avg" || valueType == "min" || valueType == "max" || valueType == "count" {
        valueStr := point.ValueAvg
        switch valueType {
        case "min":
            valueStr = point.ValueMin
        case "max":
            valueStr = point.ValueMax
        case "count":
            valueStr = point.Num
        }

        value, err := strconv.ParseFloat(valueStr, 64)
        if err != nil {
            return 0, fmt.Errorf("error parsing trend value: %s", err)
        }
        return value, nil
    } else if valueType == "sum" {
        avgStr := point.ValueAvg
        avg, err := strconv.ParseFloat(avgStr, 64)
        if err != nil {
            return 0, fmt.Errorf("error parsing trend value: %s", err)
        }
        countStr := point.Num
        count, err := strconv.ParseFloat(countStr, 64)
        if err != nil {
            return 0, fmt.Errorf("error parsing trend value: %s", err)
        }
        if count > 0 {
            return avg * count, nil
        } else {
            return 0, nil
        }
    }

    return 0, fmt.Errorf("failed to get trend value, unknown value type: %s", valueType)
}
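Context for the "sum" branch: Zabbix trend records store per-interval aggregates (value_avg, value_min, value_max, num) rather than raw points, so a sum has to be reconstructed as avg × count. For example, value_avg 2.5 over num 360 samples yields 2.5 × 360 = 900.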
var fixedUpdateIntervalPattern = regexp.MustCompile(`^(\d+)([smhdw]?)$`)

func parseItemUpdateInterval(delay string) *time.Duration {
    if valid := fixedUpdateIntervalPattern.MatchString(delay); !valid {
        return nil
    }

    interval, err := gtime.ParseInterval(delay)
    if err != nil {
        return nil
    }

    return &interval
}
@@ -1,79 +1,5 @@
package datasource

import (
    "fmt"
    "strings"
)

type Items []Item

type Item struct {
    ID        string     `json:"itemid,omitempty"`
    Key       string     `json:"key_,omitempty"`
    Name      string     `json:"name,omitempty"`
    ValueType int        `json:"value_type,omitempty,string"`
    HostID    string     `json:"hostid,omitempty"`
    Hosts     []ItemHost `json:"hosts,omitempty"`
    Status    string     `json:"status,omitempty"`
    State     string     `json:"state,omitempty"`
}

func (item *Item) ExpandItem() string {
    name := item.Name
    key := item.Key

    if strings.Index(key, "[") == -1 {
        return name
    }

    keyRunes := []rune(item.Key)
    keyParamsStr := string(keyRunes[strings.Index(key, "[")+1 : strings.LastIndex(key, "]")])
    keyParams := splitKeyParams(keyParamsStr)

    for i := len(keyParams); i >= 1; i-- {
        name = strings.ReplaceAll(name, fmt.Sprintf("$%v", i), keyParams[i-1])
    }

    return name
}

func splitKeyParams(paramStr string) []string {
    paramRunes := []rune(paramStr)
    params := []string{}
    quoted := false
    inArray := false
    splitSymbol := ","
    param := ""

    for _, r := range paramRunes {
        symbol := string(r)
        if symbol == `"` && inArray {
            param += symbol
        } else if symbol == `"` && quoted {
            quoted = false
        } else if symbol == `"` && !quoted {
            quoted = true
        } else if symbol == "[" && !quoted {
            inArray = true
        } else if symbol == "]" && !quoted {
            inArray = false
        } else if symbol == splitSymbol && !quoted && !inArray {
            params = append(params, param)
            param = ""
        } else {
            param += symbol
        }
    }

    params = append(params, param)
    return params
}
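A worked example for the two helpers above (values illustrative): for an item named "CPU $2 time" with key system.cpu.util[,user], the parameter string is ",user", splitKeyParams returns ["", "user"], and ExpandItem substitutes $2 first and then $1 (right to left, so "$12" is never clobbered by "$1"), producing "CPU user time". Quoted and bracketed parameters survive intact: splitKeyParams on `"a,b",c,[d,e]` yields ["a,b", "c", "d,e"], since commas inside quotes or brackets do not split and the quote and bracket characters themselves are dropped.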
type ItemHost struct {
    ID   string `json:"hostid,omitempty"`
    Name string `json:"name,omitempty"`
}

type Trend []TrendPoint

type TrendPoint struct {
@@ -1,59 +1,19 @@
package datasource

import (
    "encoding/json"
    "fmt"
    "regexp"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/timeseries"
    "strings"
    "time"

    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"
    simplejson "github.com/bitly/go-simplejson"
    "github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/data"
    "golang.org/x/net/context"
)
var CachedMethods = map[string]bool{
    "hostgroup.get":   true,
    "host.get":        true,
    "application.get": true,
    "item.get":        true,
    "service.get":     true,
    "usermacro.get":   true,
    "proxy.get":       true,
}

// ZabbixQuery handles query requests to Zabbix
func (ds *ZabbixDatasourceInstance) ZabbixQuery(ctx context.Context, apiReq *ZabbixAPIRequest) (*simplejson.Json, error) {
    var resultJson *simplejson.Json
    var err error

    cachedResult, queryExistInCache := ds.queryCache.GetAPIRequest(apiReq)
    if !queryExistInCache {
        resultJson, err = ds.ZabbixRequest(ctx, apiReq.Method, apiReq.Params)
        if err != nil {
            return nil, err
        }

        if _, ok := CachedMethods[apiReq.Method]; ok {
            ds.logger.Debug("Writing result to cache", "method", apiReq.Method)
            ds.queryCache.SetAPIRequest(apiReq, resultJson)
        }
    } else {
        var ok bool
        resultJson, ok = cachedResult.(*simplejson.Json)
        if !ok {
            resultJson = simplejson.New()
        }
    }

    return resultJson, nil
}
// ZabbixAPIQuery handles query requests to Zabbix API
func (ds *ZabbixDatasourceInstance) ZabbixAPIQuery(ctx context.Context, apiReq *ZabbixAPIRequest) (*ZabbixAPIResourceResponse, error) {
    resultJson, err := ds.ZabbixQuery(ctx, apiReq)
func (ds *ZabbixDatasourceInstance) ZabbixAPIQuery(ctx context.Context, apiReq *zabbix.ZabbixAPIRequest) (*ZabbixAPIResourceResponse, error) {
    resultJson, err := ds.zabbix.Request(ctx, apiReq)
    if err != nil {
        return nil, err
    }
@@ -69,12 +29,12 @@ func BuildAPIResponse(responseData *interface{}) (*ZabbixAPIResourceResponse, er
// TestConnection checks authentication and version of the Zabbix API and returns that info
func (ds *ZabbixDatasourceInstance) TestConnection(ctx context.Context) (string, error) {
    _, err := ds.getAllGroups(ctx)
    _, err := ds.zabbix.GetAllGroups(ctx)
    if err != nil {
        return "", err
    }

    response, err := ds.ZabbixRequest(ctx, "apiinfo.version", ZabbixAPIParams{})
    response, err := ds.zabbix.Request(ctx, &zabbix.ZabbixAPIRequest{Method: "apiinfo.version"})
    if err != nil {
        return "", err
    }
@@ -83,67 +43,13 @@ func (ds *ZabbixDatasourceInstance) TestConnection(ctx context.Context) (string,
    return string(resultByte), nil
}
// ZabbixRequest checks authentication and makes a request to the Zabbix API
func (ds *ZabbixDatasourceInstance) ZabbixRequest(ctx context.Context, method string, params ZabbixAPIParams) (*simplejson.Json, error) {
    ds.logger.Debug("Zabbix API request", "datasource", ds.dsInfo.Name, "method", method)
    var result *simplejson.Json
    var err error

    // Skip auth for methods that do not require it
    if method == "apiinfo.version" {
        return ds.zabbixAPI.RequestUnauthenticated(ctx, method, params)
    }

    result, err = ds.zabbixAPI.Request(ctx, method, params)
    notAuthorized := isNotAuthorized(err)
    if err == zabbixapi.ErrNotAuthenticated || notAuthorized {
        if notAuthorized {
            ds.logger.Debug("Authentication token expired, performing re-login")
        }
        err = ds.login(ctx)
        if err != nil {
            return nil, err
        }
        return ds.ZabbixRequest(ctx, method, params)
    } else if err != nil {
        return nil, err
    }

    return result, err
}

func (ds *ZabbixDatasourceInstance) login(ctx context.Context) error {
    jsonData, err := simplejson.NewJson(ds.dsInfo.JSONData)
    if err != nil {
        return err
    }

    zabbixLogin := jsonData.Get("username").MustString()
    var zabbixPassword string
    if securePassword, exists := ds.dsInfo.DecryptedSecureJSONData["password"]; exists {
        zabbixPassword = securePassword
    } else {
        // Fallback
        zabbixPassword = jsonData.Get("password").MustString()
    }

    err = ds.zabbixAPI.Authenticate(ctx, zabbixLogin, zabbixPassword)
    if err != nil {
        ds.logger.Error("Zabbix authentication error", "error", err)
        return err
    }
    ds.logger.Debug("Successfully authenticated", "url", ds.zabbixAPI.GetUrl().String(), "user", zabbixLogin)

    return nil
}
func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, query *QueryModel) (*data.Frame, error) {
func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, query *QueryModel) ([]*data.Frame, error) {
    groupFilter := query.Group.Filter
    hostFilter := query.Host.Filter
    appFilter := query.Application.Filter
    itemFilter := query.Item.Filter

    items, err := ds.getItems(ctx, groupFilter, hostFilter, appFilter, itemFilter, "num")
    items, err := ds.zabbix.GetItems(ctx, groupFilter, hostFilter, appFilter, itemFilter, "num")
    if err != nil {
        return nil, err
    }
@@ -156,229 +62,90 @@ func (ds *ZabbixDatasourceInstance) queryNumericItems(ctx context.Context, query
    return frames, nil
}
func (ds *ZabbixDatasourceInstance) getItems(ctx context.Context, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) (Items, error) {
    hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
    if err != nil {
        return nil, err
    }
    var hostids []string
    for _, k := range hosts {
        hostids = append(hostids, k["hostid"].(string))
func (ds *ZabbixDatasourceInstance) queryItemIdData(ctx context.Context, query *QueryModel) ([]*data.Frame, error) {
    itemids := strings.Split(query.ItemIDs, ",")
    for i, id := range itemids {
        itemids[i] = strings.Trim(id, " ")
    }

    apps, err := ds.getApps(ctx, groupFilter, hostFilter, appFilter)
    // Apps not supported in Zabbix 5.4 and higher
    if isAppMethodNotFoundError(err) {
        apps = []map[string]interface{}{}
    } else if err != nil {
        return nil, err
    }
    var appids []string
    for _, l := range apps {
        appids = append(appids, l["applicationid"].(string))
    }

    var allItems *simplejson.Json
    if len(hostids) > 0 {
        allItems, err = ds.getAllItems(ctx, hostids, nil, itemType)
    } else if len(appids) > 0 {
        allItems, err = ds.getAllItems(ctx, nil, appids, itemType)
    }

    var items Items

    if allItems == nil {
        items = Items{}
    } else {
        itemsJSON, err := allItems.MarshalJSON()
        if err != nil {
            return nil, err
        }

        err = json.Unmarshal(itemsJSON, &items)
        if err != nil {
            return nil, err
        }
    }

    re, err := parseFilter(itemFilter)
    items, err := ds.zabbix.GetItemsByIDs(ctx, itemids)
    if err != nil {
        return nil, err
    }

    filteredItems := Items{}
    for _, item := range items {
        itemName := item.ExpandItem()
        if item.Status == "0" {
            if re != nil {
                if re.MatchString(itemName) {
                    filteredItems = append(filteredItems, item)
                }
            } else if itemName == itemFilter {
                filteredItems = append(filteredItems, item)
            }
        }
    frames, err := ds.queryNumericDataForItems(ctx, query, items)
    if err != nil {
        return nil, err
    }
    return filteredItems, nil

    return frames, nil
}
func (ds *ZabbixDatasourceInstance) getApps(ctx context.Context, groupFilter string, hostFilter string, appFilter string) ([]map[string]interface{}, error) {
	hosts, err := ds.getHosts(ctx, groupFilter, hostFilter)
	if err != nil {
		return nil, err
	}
	var hostids []string
	for _, k := range hosts {
		hostids = append(hostids, k["hostid"].(string))
	}
	allApps, err := ds.getAllApps(ctx, hostids)
	if err != nil {
		return nil, err
	}

	re, err := parseFilter(appFilter)
	if err != nil {
		return nil, err
	}

	var apps []map[string]interface{}
	for _, i := range allApps.MustArray() {
		name := i.(map[string]interface{})["name"].(string)
		if re != nil {
			if re.MatchString(name) {
				apps = append(apps, i.(map[string]interface{}))
			}
		} else if name == appFilter {
			apps = append(apps, i.(map[string]interface{}))
		}
	}
	return apps, nil
}

func (ds *ZabbixDatasourceInstance) getHosts(ctx context.Context, groupFilter string, hostFilter string) ([]map[string]interface{}, error) {
	groups, err := ds.getGroups(ctx, groupFilter)
	if err != nil {
		return nil, err
	}
	var groupids []string
	for _, k := range groups {
		groupids = append(groupids, k["groupid"].(string))
	}
	allHosts, err := ds.getAllHosts(ctx, groupids)
	if err != nil {
		return nil, err
	}

	re, err := parseFilter(hostFilter)
	if err != nil {
		return nil, err
	}

	var hosts []map[string]interface{}
	for _, i := range allHosts.MustArray() {
		name := i.(map[string]interface{})["name"].(string)
		if re != nil {
			if re.MatchString(name) {
				hosts = append(hosts, i.(map[string]interface{}))
			}
		} else if name == hostFilter {
			hosts = append(hosts, i.(map[string]interface{}))
		}
	}

	return hosts, nil
}

func (ds *ZabbixDatasourceInstance) getGroups(ctx context.Context, groupFilter string) ([]map[string]interface{}, error) {
	allGroups, err := ds.getAllGroups(ctx)
	if err != nil {
		return nil, err
	}
	re, err := parseFilter(groupFilter)
	if err != nil {
		return nil, err
	}

	var groups []map[string]interface{}
	for _, i := range allGroups.MustArray() {
		name := i.(map[string]interface{})["name"].(string)
		if re != nil {
			if re.MatchString(name) {
				groups = append(groups, i.(map[string]interface{}))
			}
		} else if name == groupFilter {
			groups = append(groups, i.(map[string]interface{}))
		}
	}
	return groups, nil
}

func (ds *ZabbixDatasourceInstance) getAllItems(ctx context.Context, hostids []string, appids []string, itemtype string) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":         []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state"},
		"sortfield":      "name",
		"webitems":       true,
		"filter":         map[string]interface{}{},
		"selectHosts":    []string{"hostid", "name"},
		"hostids":        hostids,
		"applicationids": appids,
	}

	filter := params["filter"].(map[string]interface{})
	if itemtype == "num" {
		filter["value_type"] = []int{0, 3}
	} else if itemtype == "text" {
		filter["value_type"] = []int{1, 2, 4}
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "item.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) getAllApps(ctx context.Context, hostids []string) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":  "extend",
		"hostids": hostids,
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "application.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) getAllHosts(ctx context.Context, groupids []string) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":    []string{"name", "host"},
		"sortfield": "name",
		"groupids":  groupids,
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "host.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) getAllGroups(ctx context.Context) (*simplejson.Json, error) {
	params := ZabbixAPIParams{
		"output":     []string{"name"},
		"sortfield":  "name",
		"real_hosts": true,
	}

	return ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "hostgroup.get", Params: params})
}

func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, query *QueryModel, items Items) (*data.Frame, error) {
	valueType := ds.getTrendValueType(query)
	consolidateBy := ds.getConsolidateBy(query)

	if consolidateBy == "" {
		consolidateBy = valueType
	}

	history, err := ds.getHistotyOrTrend(ctx, query, items)
	if err != nil {
		return nil, err
	}

	frame := convertHistory(history, items)
	return frame, nil
}

func (ds *ZabbixDatasourceInstance) queryNumericDataForItems(ctx context.Context, query *QueryModel, items []*zabbix.Item) ([]*data.Frame, error) {
	trendValueType := ds.getTrendValueType(query)
	consolidateBy := ds.getConsolidateBy(query)

	if consolidateBy != "" {
		trendValueType = consolidateBy
	}

	err := applyFunctionsPre(query, items)
	if err != nil {
		return nil, err
	}

	history, err := ds.getHistotyOrTrend(ctx, query, items, trendValueType)
	if err != nil {
		return nil, err
	}

	series := convertHistoryToTimeSeries(history, items)
	return ds.applyDataProcessing(ctx, query, series)
}

func (ds *ZabbixDatasourceInstance) applyDataProcessing(ctx context.Context, query *QueryModel, series []*timeseries.TimeSeriesData) ([]*data.Frame, error) {
	consolidateBy := ds.getConsolidateBy(query)

	// Align time series data if possible
	useTrend := ds.isUseTrend(query.TimeRange)
	disableDataAlignment := query.Options.DisableDataAlignment || ds.Settings.DisableDataAlignment || query.QueryType == MODE_ITSERVICE
	if !disableDataAlignment {
		if useTrend {
			for _, s := range series {
				// Trend data is already aligned (by 1 hour interval), but null values should be added
				s.TS = s.TS.FillTrendWithNulls()
			}
		} else {
			for _, s := range series {
				if s.Meta.Interval != nil {
					s.TS = s.TS.Align(*s.Meta.Interval)
				}
			}
		}
	}

	series, err := applyFunctions(series, query.Functions)
	if err != nil {
		return nil, err
	}

	for _, s := range series {
		if int64(s.Len()) > query.MaxDataPoints && query.Interval > 0 {
			downsampleFunc := consolidateBy
			if downsampleFunc == "" {
				downsampleFunc = "avg"
			}
			downsampled, err := applyGroupBy(s.TS, query.Interval.String(), downsampleFunc)
			if err == nil {
				s.TS = downsampled
			} else {
				ds.logger.Debug("Error downsampling series", "error", err)
			}
		}
	}

	frames := convertTimeSeriesToDataFrames(series)
	return frames, nil
}

func (ds *ZabbixDatasourceInstance) getTrendValueType(query *QueryModel) string {
@@ -386,7 +153,7 @@ func (ds *ZabbixDatasourceInstance) getTrendValueType(query *QueryModel) string

	for _, fn := range query.Functions {
		if fn.Def.Name == "trendValue" && len(fn.Params) > 0 {
			trendValue = fn.Params[0]
			trendValue = fn.Params[0].(string)
		}
	}

@@ -394,69 +161,29 @@ func (ds *ZabbixDatasourceInstance) getTrendValueType(query *QueryModel) string
}

func (ds *ZabbixDatasourceInstance) getConsolidateBy(query *QueryModel) string {
	consolidateBy := "avg"
	consolidateBy := ""

	for _, fn := range query.Functions {
		if fn.Def.Name == "consolidateBy" && len(fn.Params) > 0 {
			consolidateBy = fn.Params[0]
			consolidateBy = fn.Params[0].(string)
		}
	}
	return consolidateBy
}

func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, query *QueryModel, items Items) (History, error) {
	timeRange := query.TimeRange
	useTrend := ds.isUseTrend(timeRange)
	allHistory := History{}

	groupedItems := map[int]Items{}

	for _, j := range items {
		groupedItems[j.ValueType] = append(groupedItems[j.ValueType], j)
	}

	for k, l := range groupedItems {
		var itemids []string
		for _, m := range l {
			itemids = append(itemids, m.ID)
		}

		params := ZabbixAPIParams{
			"output":    "extend",
			"sortfield": "clock",
			"sortorder": "ASC",
			"itemids":   itemids,
			"time_from": timeRange.From.Unix(),
			"time_till": timeRange.To.Unix(),
		}

		var response *simplejson.Json
		var err error
		if useTrend {
			response, err = ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "trend.get", Params: params})
		} else {
			params["history"] = &k
			response, err = ds.ZabbixQuery(ctx, &ZabbixAPIRequest{Method: "history.get", Params: params})
		}

		if err != nil {
			return nil, err
		}

		pointJSON, err := response.MarshalJSON()
		if err != nil {
			return nil, fmt.Errorf("Internal error parsing response JSON: %w", err)
		}

		history := History{}
		err = json.Unmarshal(pointJSON, &history)
		if err != nil {
			ds.logger.Error("Error handling history response", "error", err.Error())
		} else {
			allHistory = append(allHistory, history...)
		}
	}
	return allHistory, nil
}

func (ds *ZabbixDatasourceInstance) getHistotyOrTrend(ctx context.Context, query *QueryModel, items []*zabbix.Item, trendValueType string) (zabbix.History, error) {
	timeRange := query.TimeRange
	useTrend := ds.isUseTrend(timeRange)

	if useTrend {
		result, err := ds.zabbix.GetTrend(ctx, items, timeRange)
		if err != nil {
			return nil, err
		}
		return convertTrendToHistory(result, trendValueType)
	}

	return ds.zabbix.GetHistory(ctx, items, timeRange)
}

func (ds *ZabbixDatasourceInstance) isUseTrend(timeRange backend.TimeRange) bool {
@@ -475,45 +202,3 @@ func (ds *ZabbixDatasourceInstance) isUseTrend(timeRange backend.TimeRange) bool
	}
	return false
}

func parseFilter(filter string) (*regexp.Regexp, error) {
	regex := regexp.MustCompile(`^/(.+)/(.*)$`)
	flagRE := regexp.MustCompile("[imsU]+")

	matches := regex.FindStringSubmatch(filter)
	if len(matches) <= 1 {
		return nil, nil
	}

	pattern := ""
	if matches[2] != "" {
		if flagRE.MatchString(matches[2]) {
			pattern += "(?" + matches[2] + ")"
		} else {
			return nil, fmt.Errorf("error parsing regexp: unsupported flags `%s` (expected [imsU])", matches[2])
		}
	}
	pattern += matches[1]

	return regexp.Compile(pattern)
}

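To illustrate the two filter forms parseFilter accepts (example values assumed):

	// A literal filter compiles to a nil regexp, so callers fall back to exact match
	re, _ := parseFilter("Zabbix server") // re == nil
	// A "/pattern/flags" filter becomes a Go regexp with inline flags
	re, _ = parseFilter("/^linux/i") // matches "Linux host-01", "linux-02", ...
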
func isNotAuthorized(err error) bool {
	if err == nil {
		return false
	}

	message := err.Error()
	return strings.Contains(message, "Session terminated, re-login, please.") ||
		strings.Contains(message, "Not authorised.") ||
		strings.Contains(message, "Not authorized.")
}

func isAppMethodNotFoundError(err error) bool {
	if err == nil {
		return false
	}

	message := err.Error()
	return message == `Method not found. Incorrect API "application".`
}

@@ -1,134 +1,42 @@
package datasource

import (
	"context"
	"net/http"
	"testing"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"
	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
	"github.com/stretchr/testify/assert"
)

var emptyParams = map[string]interface{}{}

type RoundTripFunc func(req *http.Request) *http.Response

func (f RoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
	return f(req), nil
}

// NewTestClient returns *http.Client with Transport replaced to avoid making real calls
func NewTestClient(fn RoundTripFunc) *http.Client {
	return &http.Client{
		Transport: RoundTripFunc(fn),
	}
}

var basicDatasourceInfo = &backend.DataSourceInstanceSettings{
	ID:       1,
	Name:     "TestDatasource",
	URL:      "http://zabbix.org/zabbix",
	JSONData: []byte(`{"username":"username", "password":"password"}}`),
	JSONData: []byte(`{"username":"username", "password":"password", "cacheTTL":"10m"}`),
}

func mockZabbixQuery(method string, params ZabbixAPIParams) *ZabbixAPIRequest {
	return &ZabbixAPIRequest{
func mockZabbixQuery(method string, params zabbix.ZabbixAPIParams) *zabbix.ZabbixAPIRequest {
	return &zabbix.ZabbixAPIRequest{
		Method: method,
		Params: params,
	}
}

func MockZabbixDataSource(body string, statusCode int) *ZabbixDatasourceInstance {
	zabbixAPI, _ := zabbixapi.MockZabbixAPI(body, statusCode)
	zabbixSettings, _ := readZabbixSettings(basicDatasourceInfo)
	zabbixClient, _ := zabbix.MockZabbixClient(basicDatasourceInfo, body, statusCode)

	return &ZabbixDatasourceInstance{
		dsInfo:     basicDatasourceInfo,
		zabbixAPI:  zabbixAPI,
		Settings:   zabbixSettings,
		queryCache: NewDatasourceCache(cache.NoExpiration, 10*time.Minute),
		logger:     log.New(),
		dsInfo:   basicDatasourceInfo,
		zabbix:   zabbixClient,
		Settings: zabbixSettings,
		logger:   log.New(),
	}
}

func MockZabbixDataSourceResponse(dsInstance *ZabbixDatasourceInstance, body string, statusCode int) *ZabbixDatasourceInstance {
	zabbixAPI, _ := zabbixapi.MockZabbixAPI(body, statusCode)
	dsInstance.zabbixAPI = zabbixAPI
	zabbixClient, _ := zabbix.MockZabbixClientResponse(dsInstance.zabbix, body, statusCode)
	dsInstance.zabbix = zabbixClient

	return dsInstance
}

func TestLogin(t *testing.T) {
	dsInstance := MockZabbixDataSource(`{"result":"secretauth"}`, 200)
	err := dsInstance.login(context.Background())

	assert.Nil(t, err)
	assert.Equal(t, "secretauth", dsInstance.zabbixAPI.GetAuth())
}

func TestLoginError(t *testing.T) {
	dsInstance := MockZabbixDataSource(`{"result":""}`, 500)
	err := dsInstance.login(context.Background())

	assert.NotNil(t, err)
	assert.Equal(t, "", dsInstance.zabbixAPI.GetAuth())
}

func TestZabbixAPIQuery(t *testing.T) {
	dsInstance := MockZabbixDataSource(`{"result":"test"}`, 200)
	resp, err := dsInstance.ZabbixAPIQuery(context.Background(), mockZabbixQuery("test.get", emptyParams))

	assert.Nil(t, err)

	result, ok := resp.Result.(string)
	assert.True(t, ok)
	assert.Equal(t, "test", result)
}

func TestCachedQuery(t *testing.T) {
	// Using methods with caching enabled
	query := mockZabbixQuery("host.get", emptyParams)
	dsInstance := MockZabbixDataSource(`{"result":"testOld"}`, 200)

	// Run query first time
	resp, err := dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ := resp.Result.(string)
	assert.Equal(t, "testOld", result)

	// Mock request with new value
	dsInstance = MockZabbixDataSourceResponse(dsInstance, `{"result":"testNew"}`, 200)
	// Should not run an actual API query and should return the first (cached) result
	resp, err = dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ = resp.Result.(string)
	assert.Equal(t, "testOld", result)
}

func TestNonCachedQuery(t *testing.T) {
	// Using methods with caching disabled
	query := mockZabbixQuery("history.get", emptyParams)
	dsInstance := MockZabbixDataSource(`{"result":"testOld"}`, 200)

	// Run query first time
	resp, err := dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ := resp.Result.(string)
	assert.Equal(t, "testOld", result)

	// Mock request with new value
	dsInstance = MockZabbixDataSourceResponse(dsInstance, `{"result":"testNew"}`, 200)
	// Should run the actual API query and return the new result
	resp, err = dsInstance.ZabbixAPIQuery(context.Background(), query)

	assert.Nil(t, err)
	result, _ = resp.Result.(string)
	assert.Equal(t, "testNew", result)
}

@@ -2,187 +2,50 @@ package httpclient

import (
	"crypto/tls"
	"crypto/x509"
	"encoding/base64"
	"errors"
	"fmt"
	"net"
	"net/http"
	"sync"
	"time"

	simplejson "github.com/bitly/go-simplejson"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)

type proxyTransportCache struct {
	cache map[int64]cachedTransport
	sync.Mutex
}

// dataSourceTransport implements http.RoundTripper (https://golang.org/pkg/net/http/#RoundTripper)
type dataSourceTransport struct {
	headers   map[string]string
	transport *http.Transport
}

// RoundTrip executes a single HTTP transaction, returning a Response for the provided Request.
func (d *dataSourceTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	for key, value := range d.headers {
		req.Header.Set(key, value)
	}

	return d.transport.RoundTrip(req)
}

type cachedTransport struct {
	updated time.Time

	*dataSourceTransport
}

var ptc = proxyTransportCache{
	cache: make(map[int64]cachedTransport),
}

// GetHttpClient returns new http.Client. Transport either initialized or got from cache.
func GetHttpClient(ds *backend.DataSourceInstanceSettings, timeout time.Duration) (*http.Client, error) {
	transport, err := getHttpTransport(ds)
	if err != nil {
		return nil, err
	}

	log.DefaultLogger.Debug("Initializing new HTTP client", "timeout", timeout.Seconds())

	return &http.Client{
		Timeout:   timeout,
		Transport: transport,
	}, nil
}

func getHttpTransport(ds *backend.DataSourceInstanceSettings) (*dataSourceTransport, error) {
	ptc.Lock()
	defer ptc.Unlock()

	if t, present := ptc.cache[ds.ID]; present && ds.Updated.Equal(t.updated) {
		return t.dataSourceTransport, nil
	}

	tlsConfig, err := getTLSConfig(ds)
	if err != nil {
		return nil, err
	}

	tlsConfig.Renegotiation = tls.RenegotiateFreelyAsClient

	// Create transport which adds all custom headers
	customHeaders := getCustomHeaders(ds)
	transport := &http.Transport{
		TLSClientConfig: tlsConfig,
		Proxy:           http.ProxyFromEnvironment,
		Dial: (&net.Dialer{
			Timeout:   30 * time.Second,
			KeepAlive: 30 * time.Second,
		}).Dial,
		TLSHandshakeTimeout:   10 * time.Second,
		ExpectContinueTimeout: 1 * time.Second,
		MaxIdleConns:          100,
		IdleConnTimeout:       90 * time.Second,
	}

	if ds.BasicAuthEnabled {
		user := ds.BasicAuthUser
		password := ds.DecryptedSecureJSONData["basicAuthPassword"]
		basicAuthHeader := getBasicAuthHeader(user, password)
		customHeaders["Authorization"] = basicAuthHeader
	}

	dsTransport := &dataSourceTransport{
		headers:   customHeaders,
		transport: transport,
	}

	ptc.cache[ds.ID] = cachedTransport{
		dataSourceTransport: dsTransport,
		updated:             ds.Updated,
	}

	return dsTransport, nil
}

func getTLSConfig(ds *backend.DataSourceInstanceSettings) (*tls.Config, error) {
	var tlsSkipVerify, tlsClientAuth, tlsAuthWithCACert bool
	jsonData, err := simplejson.NewJson(ds.JSONData)
	if err != nil {
		return nil, err
	}

	if jsonData != nil {
		tlsClientAuth = jsonData.Get("tlsAuth").MustBool(false)
		tlsAuthWithCACert = jsonData.Get("tlsAuthWithCACert").MustBool(false)
		tlsSkipVerify = jsonData.Get("tlsSkipVerify").MustBool(false)
	}

	tlsConfig := &tls.Config{
		InsecureSkipVerify: tlsSkipVerify,
	}

	if tlsClientAuth || tlsAuthWithCACert {
		decrypted := ds.DecryptedSecureJSONData
		if tlsAuthWithCACert && len(decrypted["tlsCACert"]) > 0 {
			caPool := x509.NewCertPool()
			ok := caPool.AppendCertsFromPEM([]byte(decrypted["tlsCACert"]))
			if !ok {
				return nil, errors.New("Failed to parse TLS CA PEM certificate")
			}
			tlsConfig.RootCAs = caPool
		}

		if tlsClientAuth {
			cert, err := tls.X509KeyPair([]byte(decrypted["tlsClientCert"]), []byte(decrypted["tlsClientKey"]))
			if err != nil {
				return nil, err
			}
			tlsConfig.Certificates = []tls.Certificate{cert}
		}
	}

	return tlsConfig, nil
}

// getCustomHeaders returns a map with all the headers to be set
// The map key represents the HeaderName and the value represents this header's value
func getCustomHeaders(ds *backend.DataSourceInstanceSettings) map[string]string {
	headers := make(map[string]string)
	jsonData, err := simplejson.NewJson(ds.JSONData)
	if jsonData == nil || err != nil {
		return headers
	}

	decrypted := ds.DecryptedSecureJSONData
	index := 1
	for {
		headerNameSuffix := fmt.Sprintf("httpHeaderName%d", index)
		headerValueSuffix := fmt.Sprintf("httpHeaderValue%d", index)

		key := jsonData.Get(headerNameSuffix).MustString()
		if key == "" {
			// No (more) header values are available
			break
		}

		if val, ok := decrypted[headerValueSuffix]; ok {
			headers[key] = val
		}
		index++
	}

	return headers
}

// getBasicAuthHeader returns a base64 encoded string from user and password.
func getBasicAuthHeader(user string, password string) string {
	var userAndPass = user + ":" + password
	return "Basic " + base64.StdEncoding.EncodeToString([]byte(userAndPass))
}

// New creates new HTTP client.
func New(dsInfo *backend.DataSourceInstanceSettings, timeout time.Duration) (*http.Client, error) {
	clientOptions, err := dsInfo.HTTPClientOptions()
	clientOptions.Timeouts.Timeout = timeout

	tlsSkipVerify, err := getTLSSkipVerify(dsInfo)
	if err != nil {
		return nil, err
	}

	clientOptions.ConfigureTLSConfig = func(opts httpclient.Options, tlsConfig *tls.Config) {
		// grafana-plugin-sdk-go has a bug and InsecureSkipVerify is only set if TLS Client Auth is enabled, so it
		// should be set manually here
		tlsConfig.InsecureSkipVerify = tlsSkipVerify
	}

	client, err := httpclient.New(clientOptions)
	if err != nil {
		log.DefaultLogger.Error("Failed to create HTTP client", err)
		return nil, err
	}

	return client, nil
}

func getTLSSkipVerify(ds *backend.DataSourceInstanceSettings) (bool, error) {
	var tlsSkipVerify bool
	jsonData, err := simplejson.NewJson(ds.JSONData)
	if err != nil {
		return false, err
	}

	if jsonData != nil {
		tlsSkipVerify = jsonData.Get("tlsSkipVerify").MustBool(false)
	}

	return tlsSkipVerify, nil
}

@@ -45,6 +45,7 @@ func Init(logger log.Logger, mux *http.ServeMux) *datasource.ZabbixDatasource {

	mux.HandleFunc("/", ds.RootHandler)
	mux.HandleFunc("/zabbix-api", ds.ZabbixAPIHandler)
	mux.HandleFunc("/db-connection-post", ds.DBConnectionPostProcessingHandler)
	// mux.Handle("/scenarios", getScenariosHandler(logger))

	return ds

88
pkg/timeseries/agg_functions.go
Normal file
@@ -0,0 +1,88 @@
package timeseries

import (
	"math"
	"sort"
)

type AgggregationFunc = func(points []TimePoint) *float64

func AggAvg(points []TimePoint) *float64 {
	sum := AggSum(points)
	// Note: divides by the total number of points, including null points
	avg := *sum / float64(len(points))
	return &avg
}

func AggSum(points []TimePoint) *float64 {
	var sum float64 = 0
	for _, p := range points {
		if p.Value != nil {
			sum += *p.Value
		}
	}
	return &sum
}

func AggMax(points []TimePoint) *float64 {
	var max *float64 = nil
	for _, p := range points {
		if p.Value != nil {
			if max == nil {
				max = p.Value
			} else if *p.Value > *max {
				max = p.Value
			}
		}
	}
	return max
}

func AggMin(points []TimePoint) *float64 {
	var min *float64 = nil
	for _, p := range points {
		if p.Value != nil {
			if min == nil {
				min = p.Value
			} else if *p.Value < *min {
				min = p.Value
			}
		}
	}
	return min
}

func AggCount(points []TimePoint) *float64 {
	count := float64(len(points))
	return &count
}

func AggFirst(points []TimePoint) *float64 {
	return points[0].Value
}

func AggLast(points []TimePoint) *float64 {
	return points[len(points)-1].Value
}

func AggMedian(points []TimePoint) *float64 {
	return AggPercentile(50)(points)
}

func AggPercentile(n float64) AgggregationFunc {
	return func(points []TimePoint) *float64 {
		values := make([]float64, 0)
		for _, p := range points {
			if p.Value != nil {
				values = append(values, *p.Value)
			}
		}
		if len(values) == 0 {
			return nil
		}

		// sort.Float64Slice is only a type conversion and does not sort, so sort explicitly
		sort.Float64s(values)
		percentileIndex := int(math.Floor(float64(len(values)) * n / 100))
		// Clamp so AggPercentile(100) doesn't index past the end of the slice
		if percentileIndex >= len(values) {
			percentileIndex = len(values) - 1
		}
		percentile := values[percentileIndex]
		return &percentile
	}
}

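A quick sanity check of the percentile helper (values assumed):

	v1, v2, v3 := 1.0, 5.0, 3.0
	points := []TimePoint{{Value: &v1}, {Value: &v2}, {Value: &v3}}
	p50 := AggPercentile(50)(points)
	// sorted values are [1 3 5], index floor(3*50/100) = 1, so *p50 == 3
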
82
pkg/timeseries/align.go
Normal file
@@ -0,0 +1,82 @@
package timeseries

import (
	"math"
	"sort"
	"time"
)

// Align aligns point time stamps to the provided interval.
func (ts TimeSeries) Align(interval time.Duration) TimeSeries {
	if interval <= 0 || ts.Len() < 2 {
		return ts
	}

	alignedTs := NewTimeSeries()
	var frameTs = ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time
	var point TimePoint

	for i := 0; i < ts.Len(); i++ {
		point = ts[i]
		pointFrameTs = point.GetTimeFrame(interval)

		if pointFrameTs.After(frameTs) {
			for frameTs.Before(pointFrameTs) {
				alignedTs = append(alignedTs, TimePoint{Time: frameTs, Value: nil})
				frameTs = frameTs.Add(interval)
			}
		}

		alignedTs = append(alignedTs, TimePoint{Time: pointFrameTs, Value: point.Value})
		frameTs = frameTs.Add(interval)
	}

	return alignedTs
}

// FillTrendWithNulls fills missing points in trend data with null values (trends use a fixed 1 hour interval).
func (ts TimeSeries) FillTrendWithNulls() TimeSeries {
	if ts.Len() < 2 {
		return ts
	}

	interval := time.Hour
	alignedTs := NewTimeSeries()
	var frameTs = ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time
	var point TimePoint

	for i := 0; i < ts.Len(); i++ {
		point = ts[i]
		pointFrameTs = point.GetTimeFrame(interval)

		if pointFrameTs.After(frameTs) {
			for frameTs.Before(pointFrameTs) {
				alignedTs = append(alignedTs, TimePoint{Time: frameTs, Value: nil})
				frameTs = frameTs.Add(interval)
			}
		}

		alignedTs = append(alignedTs, point)
		frameTs = frameTs.Add(interval)
	}

	return alignedTs
}

// DetectInterval detects the interval between data points based on the median delta between points.
func (ts TimeSeries) DetectInterval() time.Duration {
	if ts.Len() < 2 {
		return 0
	}

	deltas := make([]int, 0)
	for i := 1; i < ts.Len(); i++ {
		delta := ts[i].Time.Sub(ts[i-1].Time)
		deltas = append(deltas, int(delta.Milliseconds()))
	}
	sort.Ints(deltas)
	midIndex := int(math.Floor(float64(len(deltas)) * 0.5))
	return time.Duration(deltas[midIndex]) * time.Millisecond
}

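For illustration, how Align fills a gap (timestamps assumed):

	v1, v2 := 1.0, 2.0
	t0 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
	ts := TimeSeries{
		{Time: t0, Value: &v1},
		{Time: t0.Add(30 * time.Second), Value: &v2},
	}
	aligned := ts.Align(10 * time.Second)
	// aligned: t0 -> 1, t0+10s -> null, t0+20s -> null, t0+30s -> 2
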
@@ -1,12 +1,33 @@
package timeseries

import "time"
import (
	"encoding/json"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbix"
)

type TimePoint struct {
	Time  time.Time
	Value *float64
}

func (p *TimePoint) UnmarshalJSON(data []byte) error {
	point := &struct {
		Time  int64
		Value *float64
	}{}

	if err := json.Unmarshal(data, &point); err != nil {
		return err
	}

	p.Value = point.Value
	p.Time = time.Unix(point.Time, 0)

	return nil
}

type TimeSeries []TimePoint

func NewTimeSeries() TimeSeries {
@@ -16,3 +37,20 @@ func NewTimeSeries() TimeSeries {
func (ts *TimeSeries) Len() int {
	return len(*ts)
}

type TimeSeriesData struct {
	TS   TimeSeries
	Meta TimeSeriesMeta
}

type TimeSeriesMeta struct {
	Name string
	Item *zabbix.Item

	// Item update interval. nil means intervals are not supported (flexible, schedule, etc)
	Interval *time.Duration
}

type AggFunc = func(points []TimePoint) *float64

type TransformFunc = func(point TimePoint) TimePoint

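A small sketch of the payload shape the custom unmarshaller accepts (sample values assumed; Go's decoder matches the Time and Value keys case-insensitively):

	var p TimePoint
	_ = json.Unmarshal([]byte(`{"time": 1600000000, "value": 42.5}`), &p)
	// p.Time == time.Unix(1600000000, 0), *p.Value == 42.5
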
128
pkg/timeseries/moving_average.go
Normal file
@@ -0,0 +1,128 @@
package timeseries

import "math"

func (ts TimeSeries) SimpleMovingAverage(n int) TimeSeries {
	if ts.Len() == 0 {
		return ts
	}

	sma := []TimePoint{ts[0]}

	// It's not possible to calculate MA if n is greater than the number of points
	n = int(math.Min(float64(ts.Len()), float64(n)))

	// Initial window, use simple moving average
	windowCount := 0
	var windowSum float64 = 0
	for i := n; i > 0; i-- {
		point := ts[n-i]
		if point.Value != nil {
			windowSum += *point.Value
			windowCount++
		}
	}
	if windowCount > 0 {
		windowAvg := windowSum / float64(windowCount)
		// Actually, we should set the timestamp from datapoints[n-1] and start the SMA calculation from n. But in order
		// to start the SMA from the first point (not from the Nth) we would have to expand the time range and request N
		// additional points outside the left side of the range. We can't do that, so this trick is used for a pretty
		// view of the first N points: we calculate AVG for the first N points, but then start from the 2nd point, not
		// from the Nth. In effect, we assume the previous N points (0-N, 0-(N-1), ..., 0-1) have the same average value
		// as the first N points.
		sma[0] = TimePoint{Time: ts[0].Time, Value: &windowAvg}
	}

	for i := 1; i < ts.Len(); i++ {
		leftEdge := int(math.Max(0, float64(i-n)))
		point := ts[i]
		leftPoint := ts[leftEdge]

		// Remove left value
		if leftPoint.Value != nil {
			if windowCount > 0 {
				if i < n {
					windowSum -= windowSum / float64(windowCount)
				} else {
					windowSum -= *leftPoint.Value
				}
				windowCount--
			}
		}

		// Insert next value
		if point.Value != nil {
			windowSum += *point.Value
			windowCount++
			windowAvg := windowSum / float64(windowCount)
			value := windowAvg
			sma = append(sma, TimePoint{Time: point.Time, Value: &value})
		} else {
			sma = append(sma, TimePoint{Time: point.Time, Value: nil})
		}
	}

	return sma
}

func (ts TimeSeries) ExponentialMovingAverage(an float64) TimeSeries {
	if ts.Len() == 0 {
		return ts
	}

	// It's not possible to calculate MA if n is greater than the number of points
	an = math.Min(float64(ts.Len()), an)

	// The alpha coefficient should be between 0 and 1. If the provided n <= 1, then use it as alpha directly.
	// Otherwise, it's the number of points in the window and alpha is calculated from this information.
	var a float64
	var n int
	ema := []TimePoint{ts[0]}
	// NOTE: assumes the first point is not null
	emaPrev := *ts[0].Value
	var emaCurrent float64

	if an > 1 {
		// Calculate a from window size
		a = 2 / (an + 1)
		n = int(an)

		// Initial window, use simple moving average
		windowCount := 0
		var windowSum float64 = 0
		for i := n; i > 0; i-- {
			point := ts[n-i]
			if point.Value != nil {
				windowSum += *point.Value
				windowCount++
			}
		}
		if windowCount > 0 {
			windowAvg := windowSum / float64(windowCount)
			// See the note in SimpleMovingAverage: the AVG of the first N points seeds the first point so the EMA can
			// start from the 2nd point instead of the Nth.
			ema[0] = TimePoint{Time: ts[0].Time, Value: &windowAvg}
			emaPrev = windowAvg
			n = 1
		}
	} else {
		// Use the provided a directly and start from the 1st point (use it as the initial EMA value)
		a = an
		n = 1
	}

	for i := n; i < ts.Len(); i++ {
		point := ts[i]
		if point.Value != nil {
			emaCurrent = a*(*point.Value) + (1-a)*emaPrev
			emaPrev = emaCurrent
			value := emaCurrent
			ema = append(ema, TimePoint{Time: point.Time, Value: &value})
		} else {
			ema = append(ema, TimePoint{Time: point.Time, Value: nil})
		}
	}

	return ema
}

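A short usage sketch (assumed series) contrasting the two smoothers:

	smoothed := ts.SimpleMovingAverage(5)    // 5-point window
	ema1 := ts.ExponentialMovingAverage(5)   // an > 1: window size, alpha = 2/(5+1)
	ema2 := ts.ExponentialMovingAverage(0.3) // an <= 1: used as alpha directly
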
52
pkg/timeseries/sort.go
Normal file
@@ -0,0 +1,52 @@
package timeseries

import (
	"sort"
	"strconv"
)

// SortBy sorts series by the value calculated with the provided aggFunc in the given order
func SortBy(series []*TimeSeriesData, order string, aggFunc AggFunc) []*TimeSeriesData {
	aggregatedSeries := make([]TimeSeries, len(series))
	for i, s := range series {
		aggregatedSeries[i] = s.TS.GroupByRange(aggFunc)
	}

	// Sort by aggregated value
	sort.Slice(series, func(i, j int) bool {
		if len(aggregatedSeries[i]) > 0 && len(aggregatedSeries[j]) > 0 {
			return *aggregatedSeries[i][0].Value < *aggregatedSeries[j][0].Value
		} else if len(aggregatedSeries[j]) > 0 {
			return true
		}
		return false
	})

	if order == "desc" {
		reverseSeries := make([]*TimeSeriesData, len(series))
		for i := 0; i < len(series); i++ {
			reverseSeries[i] = series[len(series)-1-i]
		}
		series = reverseSeries
	}

	return series
}

func SortByItem(series []*TimeSeriesData) []*TimeSeriesData {
	sort.Slice(series, func(i, j int) bool {
		itemIDi, err := strconv.Atoi(series[i].Meta.Item.ID)
		if err != nil {
			return false
		}

		itemIDj, err := strconv.Atoi(series[j].Meta.Item.ID)
		if err != nil {
			return false
		}

		return itemIDi < itemIDj
	})

	return series
}

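For example (a sketch; assumes series is already populated):

	series = SortBy(series, "desc", AggMax) // series with the highest max first
	series = SortByItem(series)             // order by numeric Zabbix item ID
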
@@ -1,58 +1,286 @@
package timeseries

import (
	"errors"
	"math"
	"sort"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
)

// Aligns point's time stamps according to provided interval. (removed here, moved to align.go)
func (ts TimeSeries) Align(interval time.Duration) TimeSeries {
	if interval <= 0 || ts.Len() < 2 {
		return ts
	}

	alignedTs := NewTimeSeries()
	var frameTs = ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time
	var point TimePoint

	for i := 1; i < ts.Len(); i++ {
		point = ts[i]
		pointFrameTs = point.GetTimeFrame(interval)

		if pointFrameTs.After(frameTs) {
			for frameTs.Before(pointFrameTs) {
				alignedTs = append(alignedTs, TimePoint{Time: frameTs, Value: nil})
				frameTs = frameTs.Add(interval)
			}
		}

		alignedTs = append(alignedTs, TimePoint{Time: pointFrameTs, Value: point.Value})
		frameTs = frameTs.Add(interval)
	}

	return alignedTs
}

func NewTimeSeriesData() *TimeSeriesData {
	return &TimeSeriesData{
		TS:   NewTimeSeries(),
		Meta: TimeSeriesMeta{},
	}
}

func (tsd TimeSeriesData) Len() int {
	return len(tsd.TS)
}

func (tsd *TimeSeriesData) Add(point TimePoint) *TimeSeriesData {
	if tsd.TS == nil {
		tsd.TS = NewTimeSeries()
	}

	tsd.TS = append(tsd.TS, point)
	return tsd
}

// GroupBy groups points in the given interval by applying the provided aggFunc. The source time series should be sorted by time.
func (ts TimeSeries) GroupBy(interval time.Duration, aggFunc AggFunc) TimeSeries {
	if ts.Len() == 0 {
		return ts
	}

	groupedSeries := NewTimeSeries()
	frame := make([]TimePoint, 0)
	frameTS := ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time

	for _, point := range ts {
		pointFrameTs = point.GetTimeFrame(interval)

		// Push each point into the current frame if its time stamp fits the frame
		if pointFrameTs == frameTS {
			frame = append(frame, point)
		} else if pointFrameTs.After(frameTS) {
			// If the point is outside the frame, then we're done with the current frame
			groupedSeries = append(groupedSeries, TimePoint{
				Time:  frameTS,
				Value: aggFunc(frame),
			})

			// Move the frame window to the next non-empty interval and fill empty intervals with nulls
			frameTS = frameTS.Add(interval)
			for frameTS.Before(pointFrameTs) {
				groupedSeries = append(groupedSeries, TimePoint{
					Time:  frameTS,
					Value: nil,
				})
				frameTS = frameTS.Add(interval)
			}
			frame = []TimePoint{point}
		}
	}

	groupedSeries = append(groupedSeries, TimePoint{
		Time:  frameTS,
		Value: aggFunc(frame),
	})

	return groupedSeries
}

func (ts TimeSeries) GroupByRange(aggFunc AggFunc) TimeSeries {
	if ts.Len() == 0 {
		return ts
	}

	value := aggFunc(ts)
	return []TimePoint{
		{Time: ts[0].Time, Value: value},
		{Time: ts[ts.Len()-1].Time, Value: value},
	}
}

func (ts TimeSeries) Delta() TimeSeries {
	deltaSeries := NewTimeSeries()
	for i := 1; i < ts.Len(); i++ {
		currentPoint := ts[i]
		previousPoint := ts[i-1]
		if currentPoint.Value != nil && previousPoint.Value != nil {
			deltaValue := *currentPoint.Value - *previousPoint.Value
			deltaSeries = append(deltaSeries, TimePoint{Time: ts[i].Time, Value: &deltaValue})
		} else {
			deltaSeries = append(deltaSeries, TimePoint{Time: ts[i].Time, Value: nil})
		}
	}

	return deltaSeries
}

func (ts TimeSeries) Rate() TimeSeries {
	rateSeries := NewTimeSeries()
	var valueDelta float64 = 0
	for i := 1; i < ts.Len(); i++ {
		currentPoint := ts[i]
		previousPoint := ts[i-1]
		timeDelta := currentPoint.Time.Sub(previousPoint.Time)

		// Handle counter reset - keep the previous rate value
		if currentPoint.Value != nil && previousPoint.Value != nil && *currentPoint.Value >= *previousPoint.Value {
			valueDelta = (*currentPoint.Value - *previousPoint.Value) / timeDelta.Seconds()
		}

		value := valueDelta
		rateSeries = append(rateSeries, TimePoint{Time: ts[i].Time, Value: &value})
	}

	return rateSeries
}

func (ts TimeSeries) Transform(transformFunc TransformFunc) TimeSeries {
	for i, p := range ts {
		ts[i] = transformFunc(p)
	}
	return ts
}

func Filter(series []*TimeSeriesData, n int, order string, aggFunc AggFunc) []*TimeSeriesData {
	SortBy(series, "asc", aggFunc)

	filteredSeries := make([]*TimeSeriesData, n)
	for i := 0; i < n; i++ {
		if order == "top" {
			filteredSeries[i] = series[len(series)-1-i]
		} else if order == "bottom" {
			filteredSeries[i] = series[i]
		}
	}

	return filteredSeries
}

func AggregateBy(series []*TimeSeriesData, interval time.Duration, aggFunc AggFunc) *TimeSeriesData {
	aggregatedSeries := NewTimeSeries()

	// Combine all points into one time series
	for _, s := range series {
		aggregatedSeries = append(aggregatedSeries, s.TS...)
	}

	// GroupBy works correctly only with sorted time series
	aggregatedSeries.Sort()

	aggregatedSeries = aggregatedSeries.GroupBy(interval, aggFunc)
	aggregatedSeriesData := NewTimeSeriesData()
	aggregatedSeriesData.TS = aggregatedSeries
	return aggregatedSeriesData
}

func (ts TimeSeries) Sort() {
	sorted := sort.SliceIsSorted(ts, ts.less())
	if !sorted {
		sort.Slice(ts, ts.less())
	}
}

// less implements the less() function for sorting the slice
func (ts TimeSeries) less() func(i, j int) bool {
	return func(i, j int) bool {
		return ts[i].Time.Before(ts[j].Time)
	}
}

func SumSeries(series []*TimeSeriesData) *TimeSeriesData {
	// Build a unique set of time stamps from all series
	interpolatedTimeStampsMap := make(map[time.Time]time.Time)
	for _, s := range series {
		for _, p := range s.TS {
			interpolatedTimeStampsMap[p.Time] = p.Time
		}
	}

	// Convert to a slice and sort
	interpolatedTimeStamps := make([]time.Time, 0)
	for _, ts := range interpolatedTimeStampsMap {
		interpolatedTimeStamps = append(interpolatedTimeStamps, ts)
	}
	sort.Slice(interpolatedTimeStamps, func(i, j int) bool {
		return interpolatedTimeStamps[i].Before(interpolatedTimeStamps[j])
	})

	interpolatedSeries := make([]TimeSeries, 0)

	for _, s := range series {
		if s.Len() == 0 {
			continue
		}

		pointsToInterpolate := make([]TimePoint, 0)

		currentPointIndex := 0
		for _, its := range interpolatedTimeStamps {
			currentPoint := s.TS[currentPointIndex]
			if its.Equal(currentPoint.Time) {
				if currentPointIndex < s.Len()-1 {
					currentPointIndex++
				}
			} else {
				pointsToInterpolate = append(pointsToInterpolate, TimePoint{Time: its, Value: nil})
			}
		}

		s.TS = append(s.TS, pointsToInterpolate...)
		s.TS.Sort()
		s.TS = interpolateSeries(s.TS)
		interpolatedSeries = append(interpolatedSeries, s.TS)
	}

	sumSeries := NewTimeSeriesData()
	for i := 0; i < len(interpolatedTimeStamps); i++ {
		var sum float64 = 0
		for _, s := range interpolatedSeries {
			if s[i].Value != nil {
				sum += *s[i].Value
			}
		}
		sumSeries.TS = append(sumSeries.TS, TimePoint{Time: interpolatedTimeStamps[i], Value: &sum})
	}

	return sumSeries
}

// Detects interval between data points based on median delta between points. (removed here, moved to align.go)
func (ts TimeSeries) DetectInterval() time.Duration {
	if ts.Len() < 2 {
		return 0
	}

	deltas := make([]int, 0)
	for i := 1; i < ts.Len(); i++ {
		delta := ts[i].Time.Sub(ts[i-1].Time)
		deltas = append(deltas, int(delta.Milliseconds()))
	}
	sort.Ints(deltas)
	midIndex := int(math.Floor(float64(len(deltas)) * 0.5))
	return time.Duration(deltas[midIndex]) * time.Millisecond
}

func interpolateSeries(series TimeSeries) TimeSeries {
	for i := series.Len() - 1; i >= 0; i-- {
		point := series[i]
		if point.Value == nil {
			left := findNearestLeft(series, i)
			right := findNearestRight(series, i)

			if left == nil && right == nil {
				continue
			}
			if left == nil {
				left = right
			}
			if right == nil {
				right = left
			}

			pointValue := linearInterpolation(point.Time, *left, *right)
			point.Value = &pointValue
			series[i] = point
		}
	}

	return series
}

func linearInterpolation(ts time.Time, left, right TimePoint) float64 {
	if left.Time.Equal(right.Time) {
		return (*left.Value + *right.Value) / 2
	} else {
		return *left.Value + (*right.Value-*left.Value)/float64(right.Time.UnixNano()-left.Time.UnixNano())*float64(ts.UnixNano()-left.Time.UnixNano())
	}
}

func findNearestRight(series TimeSeries, pointIndex int) *TimePoint {
	for i := pointIndex; i < series.Len(); i++ {
		if series[i].Value != nil {
			return &series[i]
		}
	}
	return nil
}

func findNearestLeft(series TimeSeries, pointIndex int) *TimePoint {
	// i >= 0 so the first point is also considered (i > 0 would skip it)
	for i := pointIndex; i >= 0; i-- {
		if series[i].Value != nil {
			return &series[i]
		}
	}
	return nil
}

// Gets the point timestamp rounded according to the provided interval.
@@ -60,44 +288,6 @@ func (p *TimePoint) GetTimeFrame(interval time.Duration) time.Time {
	return p.Time.Truncate(interval)
}

func alignDataPoints(frame *data.Frame, interval time.Duration) *data.Frame {
	if interval <= 0 || frame.Rows() < 2 {
		return frame
	}

	timeFieldIdx := getTimeFieldIndex(frame)
	if timeFieldIdx < 0 {
		return frame
	}
	var frameTs = getPointTimeFrame(getTimestampAt(frame, 0), interval)
	var pointFrameTs *time.Time
	var pointsInserted = 0

	for i := 1; i < frame.Rows(); i++ {
		pointFrameTs = getPointTimeFrame(getTimestampAt(frame, i), interval)
		if pointFrameTs == nil || frameTs == nil {
			continue
		}

		if pointFrameTs.After(*frameTs) {
			for frameTs.Before(*pointFrameTs) {
				insertAt := i + pointsInserted
				err := insertNullPointAt(frame, *frameTs, insertAt)
				if err != nil {
					backend.Logger.Debug("Error inserting null point", "error", err)
				}
				*frameTs = frameTs.Add(interval)
				pointsInserted++
			}
		}

		setTimeAt(frame, *pointFrameTs, i+pointsInserted)
		*frameTs = frameTs.Add(interval)
	}

	return frame
}

func getPointTimeFrame(ts *time.Time, interval time.Duration) *time.Time {
	if ts == nil {
		return nil
@@ -131,19 +321,6 @@ func getTimestampAt(frame *data.Frame, index int) *time.Time {
	return &ts
}

func insertNullPointAt(frame *data.Frame, frameTs time.Time, index int) error {
	for _, field := range frame.Fields {
		if field.Type() == data.FieldTypeTime {
			field.Insert(index, frameTs)
		} else if field.Type().Nullable() {
			field.Insert(index, nil)
		} else {
			return errors.New("field is not nullable")
		}
	}
	return nil
}

func setTimeAt(frame *data.Frame, frameTs time.Time, index int) {
	for _, field := range frame.Fields {
		if field.Type() == data.FieldTypeTime {

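A short sketch (assumed data) of the processing primitives defined above:

	hourly := ts.GroupBy(time.Hour, AggAvg) // hourly averages, empty intervals become nulls
	deltas := ts.Delta()                    // point-to-point differences
	perSec := ts.Rate()                     // per-second rate; on counter reset the last rate is kept
	total := SumSeries(series)              // interpolates to a common time axis, then sums
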
58
pkg/timeseries/transform_functions.go
Normal file
@@ -0,0 +1,58 @@
package timeseries

import "time"

func TransformScale(factor float64) TransformFunc {
	return func(point TimePoint) TimePoint {
		if point.Value != nil {
			newValue := *point.Value * factor
			point.Value = &newValue
		}
		return point
	}
}

func TransformOffset(offset float64) TransformFunc {
	return func(point TimePoint) TimePoint {
		if point.Value != nil {
			newValue := *point.Value + offset
			point.Value = &newValue
		}
		return point
	}
}

func TransformNull(nullValue float64) TransformFunc {
	return func(point TimePoint) TimePoint {
		if point.Value == nil {
			point.Value = &nullValue
		}
		return point
	}
}

func TransformRemoveAboveValue(threshold float64) TransformFunc {
	return func(point TimePoint) TimePoint {
		if point.Value != nil && *point.Value > threshold {
			point.Value = nil
		}
		return point
	}
}

func TransformRemoveBelowValue(threshold float64) TransformFunc {
	return func(point TimePoint) TimePoint {
		if point.Value != nil && *point.Value < threshold {
			point.Value = nil
		}
		return point
	}
}

func TransformShiftTime(interval time.Duration) TransformFunc {
	return func(point TimePoint) TimePoint {
		shiftedTime := point.Time.Add(interval)
		point.Time = shiftedTime
		return point
	}
}

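Transforms compose naturally with TimeSeries.Transform; for example (factor assumed):

	ts = ts.Transform(TransformScale(1.0 / 1048576)) // bytes -> MiB
	ts = ts.Transform(TransformNull(0))              // replace remaining nulls with zero
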
55
pkg/zabbix/cache.go
Normal file
@@ -0,0 +1,55 @@
package zabbix

import (
	"crypto/sha1"
	"encoding/hex"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/cache"
)

var cachedMethods = map[string]bool{
	"hostgroup.get":   true,
	"host.get":        true,
	"application.get": true,
	"item.get":        true,
	"service.get":     true,
	"usermacro.get":   true,
	"proxy.get":       true,
}

func IsCachedRequest(method string) bool {
	_, ok := cachedMethods[method]
	return ok
}

// ZabbixCache is a cache for a datasource instance.
type ZabbixCache struct {
	cache *cache.Cache
}

// NewZabbixCache creates a ZabbixCache with the given expiration (ttl) time and cleanupInterval.
func NewZabbixCache(ttl time.Duration, cleanupInterval time.Duration) *ZabbixCache {
	return &ZabbixCache{
		cache.NewCache(ttl, cleanupInterval),
	}
}

// GetAPIRequest gets a request response from the cache
func (c *ZabbixCache) GetAPIRequest(request *ZabbixAPIRequest) (interface{}, bool) {
	requestHash := HashString(request.String())
	return c.cache.Get(requestHash)
}

// SetAPIRequest writes a request response to the cache
func (c *ZabbixCache) SetAPIRequest(request *ZabbixAPIRequest, response interface{}) {
	requestHash := HashString(request.String())
	c.cache.Set(requestHash, response)
}

// HashString converts the given text string to a hash string
func HashString(text string) string {
	hash := sha1.New()
	hash.Write([]byte(text))
	return hex.EncodeToString(hash.Sum(nil))
}

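A sketch of the intended read-through flow (the raw request helper name is hypothetical):

	if IsCachedRequest(request.Method) {
		if cached, ok := zabbixCache.GetAPIRequest(request); ok {
			return cached, nil // e.g. host.get served from cache
		}
	}
	result, err := ds.rawRequest(ctx, request) // hypothetical uncached call
	if err == nil && IsCachedRequest(request.Method) {
		zabbixCache.SetAPIRequest(request, result)
	}
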
357
pkg/zabbix/methods.go
Normal file
@@ -0,0 +1,357 @@
package zabbix

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func (ds *Zabbix) GetHistory(ctx context.Context, items []*Item, timeRange backend.TimeRange) (History, error) {
	history := History{}
	// Zabbix stores history in different tables, and the `history` param is required for the query, so a single query
	// can only fetch history for items of one type. In order to get history for items with multiple types (numeric
	// unsigned and numeric float), items are grouped by the `value_type` field.
	groupedItemids := make(map[int][]string, 0)
	for _, item := range items {
		groupedItemids[item.ValueType] = append(groupedItemids[item.ValueType], item.ID)
	}

	for historyType, itemids := range groupedItemids {
		result, err := ds.getHistory(ctx, itemids, historyType, timeRange)
		if err != nil {
			return nil, err
		}

		history = append(history, result...)
	}

	return history, nil
}

func (ds *Zabbix) getHistory(ctx context.Context, itemids []string, historyType int, timeRange backend.TimeRange) (History, error) {
	params := ZabbixAPIParams{
		"output":    "extend",
		"itemids":   itemids,
		"history":   historyType,
		"time_from": timeRange.From.Unix(),
		"time_till": timeRange.To.Unix(),
		"sortfield": "clock",
		"sortorder": "ASC",
	}

	result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "history.get", Params: params})
	if err != nil {
		return nil, err
	}

	var history History
	err = convertTo(result, &history)
	return history, err
}

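A minimal call sketch (client and items assumed) showing how the grouping stays invisible to callers:

	history, err := zbx.GetHistory(ctx, items, backend.TimeRange{
		From: time.Now().Add(-time.Hour),
		To:   time.Now(),
	})
	// items may mix value_type 0 and 3; GetHistory issues one history.get per type
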
func (ds *Zabbix) GetTrend(ctx context.Context, items []*Item, timeRange backend.TimeRange) (Trend, error) {
	itemids := make([]string, 0)
	for _, item := range items {
		itemids = append(itemids, item.ID)
	}

	return ds.getTrend(ctx, itemids, timeRange)
}

func (ds *Zabbix) getTrend(ctx context.Context, itemids []string, timeRange backend.TimeRange) (Trend, error) {
	params := ZabbixAPIParams{
		"output":    "extend",
		"itemids":   itemids,
		"time_from": timeRange.From.Unix(),
		"time_till": timeRange.To.Unix(),
		"sortfield": "clock",
		"sortorder": "ASC",
	}

	result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "trend.get", Params: params})
	if err != nil {
		return nil, err
	}

	var trend Trend
	err = convertTo(result, &trend)
	return trend, err
}

func (ds *Zabbix) GetItems(ctx context.Context, groupFilter string, hostFilter string, appFilter string, itemFilter string, itemType string) ([]*Item, error) {
	hosts, err := ds.GetHosts(ctx, groupFilter, hostFilter)
	if err != nil {
		return nil, err
	}
	var hostids []string
	for _, host := range hosts {
		hostids = append(hostids, host.ID)
	}

	apps, err := ds.GetApps(ctx, groupFilter, hostFilter, appFilter)
	// Apps not supported in Zabbix 5.4 and higher
	if isAppMethodNotFoundError(err) {
		apps = []Application{}
	} else if err != nil {
		return nil, err
	}
	var appids []string
	for _, app := range apps {
		appids = append(appids, app.ID)
	}

	var allItems []*Item
	if len(hostids) > 0 {
		allItems, err = ds.GetAllItems(ctx, hostids, nil, itemType)
	} else if len(appids) > 0 {
		allItems, err = ds.GetAllItems(ctx, nil, appids, itemType)
	}

	return filterItemsByQuery(allItems, itemFilter)
}

func filterItemsByQuery(items []*Item, filter string) ([]*Item, error) {
	re, err := parseFilter(filter)
	if err != nil {
		return nil, err
	}

	var filteredItems []*Item
	for _, i := range items {
		name := i.Name
		if re != nil {
			if re.MatchString(name) {
				filteredItems = append(filteredItems, i)
			}
		} else if name == filter {
			filteredItems = append(filteredItems, i)
		}
	}

	return filteredItems, nil
}

func (ds *Zabbix) GetApps(ctx context.Context, groupFilter string, hostFilter string, appFilter string) ([]Application, error) {
	hosts, err := ds.GetHosts(ctx, groupFilter, hostFilter)
	if err != nil {
		return nil, err
	}
	var hostids []string
	for _, host := range hosts {
		hostids = append(hostids, host.ID)
	}
	allApps, err := ds.GetAllApps(ctx, hostids)
	if err != nil {
		return nil, err
	}

	return filterAppsByQuery(allApps, appFilter)
}

func filterAppsByQuery(items []Application, filter string) ([]Application, error) {
	re, err := parseFilter(filter)
	if err != nil {
		return nil, err
	}

	var filteredItems []Application
	for _, i := range items {
		name := i.Name
		if re != nil {
			if re.MatchString(name) {
				filteredItems = append(filteredItems, i)
			}
		} else if name == filter {
			filteredItems = append(filteredItems, i)
		}
	}

	return filteredItems, nil
}

func (ds *Zabbix) GetHosts(ctx context.Context, groupFilter string, hostFilter string) ([]Host, error) {
	groups, err := ds.GetGroups(ctx, groupFilter)
	if err != nil {
		return nil, err
	}
	var groupids []string
	for _, group := range groups {
		groupids = append(groupids, group.ID)
	}
	allHosts, err := ds.GetAllHosts(ctx, groupids)
	if err != nil {
		return nil, err
	}

	return filterHostsByQuery(allHosts, hostFilter)
}

func filterHostsByQuery(items []Host, filter string) ([]Host, error) {
	re, err := parseFilter(filter)
	if err != nil {
		return nil, err
	}

	var filteredItems []Host
	for _, i := range items {
		name := i.Name
		if re != nil {
			if re.MatchString(name) {
				filteredItems = append(filteredItems, i)
			}
		} else if name == filter {
			filteredItems = append(filteredItems, i)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return filteredItems, nil
|
||||
}
|
||||
|
||||
func (ds *Zabbix) GetGroups(ctx context.Context, groupFilter string) ([]Group, error) {
|
||||
allGroups, err := ds.GetAllGroups(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return filterGroupsByQuery(allGroups, groupFilter)
|
||||
}
|
||||
|
||||
func filterGroupsByQuery(items []Group, filter string) ([]Group, error) {
|
||||
re, err := parseFilter(filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var filteredItems []Group
|
||||
for _, i := range items {
|
||||
name := i.Name
|
||||
if re != nil {
|
||||
if re.MatchString(name) {
|
||||
filteredItems = append(filteredItems, i)
|
||||
}
|
||||
} else if name == filter {
|
||||
filteredItems = append(filteredItems, i)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return filteredItems, nil
|
||||
}
|
||||
|
||||
func (ds *Zabbix) GetAllItems(ctx context.Context, hostids []string, appids []string, itemtype string) ([]*Item, error) {
|
||||
params := ZabbixAPIParams{
|
||||
"output": []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state", "units", "valuemapid", "delay"},
|
||||
"sortfield": "name",
|
||||
"webitems": true,
|
||||
"filter": map[string]interface{}{},
|
||||
"selectHosts": []string{"hostid", "name"},
|
||||
"hostids": hostids,
|
||||
"applicationids": appids,
|
||||
}
|
||||
|
||||
filter := params["filter"].(map[string]interface{})
|
||||
if itemtype == "num" {
|
||||
filter["value_type"] = []int{0, 3}
|
||||
} else if itemtype == "text" {
|
||||
filter["value_type"] = []int{1, 2, 4}
|
||||
}
|
||||
|
||||
result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "item.get", Params: params})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var items []*Item
|
||||
err = convertTo(result, &items)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
items = expandItems(items)
|
||||
return items, err
|
||||
}
|
||||
|
||||
func (ds *Zabbix) GetItemsByIDs(ctx context.Context, itemids []string) ([]*Item, error) {
|
||||
params := ZabbixAPIParams{
|
||||
"itemids": itemids,
|
||||
"output": []string{"itemid", "name", "key_", "value_type", "hostid", "status", "state", "units", "valuemapid", "delay"},
|
||||
"webitems": true,
|
||||
"selectHosts": []string{"hostid", "name"},
|
||||
}
|
||||
|
||||
result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "item.get", Params: params})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var items []*Item
|
||||
err = convertTo(result, &items)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
items = expandItems(items)
|
||||
return items, err
|
||||
}
|
||||
|
||||
func (ds *Zabbix) GetAllApps(ctx context.Context, hostids []string) ([]Application, error) {
|
||||
params := ZabbixAPIParams{
|
||||
"output": "extend",
|
||||
"hostids": hostids,
|
||||
}
|
||||
|
||||
result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "application.get", Params: params})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var apps []Application
|
||||
err = convertTo(result, &apps)
|
||||
return apps, err
|
||||
}
|
||||
|
||||
func (ds *Zabbix) GetAllHosts(ctx context.Context, groupids []string) ([]Host, error) {
|
||||
params := ZabbixAPIParams{
|
||||
"output": []string{"name", "host"},
|
||||
"sortfield": "name",
|
||||
"groupids": groupids,
|
||||
}
|
||||
|
||||
result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "host.get", Params: params})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var hosts []Host
|
||||
err = convertTo(result, &hosts)
|
||||
return hosts, err
|
||||
}
|
||||
|
||||
func (ds *Zabbix) GetAllGroups(ctx context.Context) ([]Group, error) {
|
||||
params := ZabbixAPIParams{
|
||||
"output": []string{"name"},
|
||||
"sortfield": "name",
|
||||
"real_hosts": true,
|
||||
}
|
||||
|
||||
result, err := ds.Request(ctx, &ZabbixAPIRequest{Method: "hostgroup.get", Params: params})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var groups []Group
|
||||
err = convertTo(result, &groups)
|
||||
return groups, err
|
||||
}
|
||||
|
||||
func isAppMethodNotFoundError(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
message := err.Error()
|
||||
return message == `Method not found. Incorrect API "application".`
|
||||
}
|
||||
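The grouping step in GetHistory is the detail worth internalizing: `history.get` accepts one value type per call, so items are bucketed by `value_type` first and one request is issued per bucket. A standalone sketch of that bucketing with stub data (types and IDs here are illustrative):

package main

import "fmt"

type item struct {
	ID        string
	ValueType int // 0 = numeric float, 3 = numeric unsigned
}

func main() {
	items := []item{{"10101", 0}, {"10102", 3}, {"10103", 0}}
	grouped := make(map[int][]string)
	for _, it := range items {
		grouped[it.ValueType] = append(grouped[it.ValueType], it.ID)
	}
	// One history.get call per value type key.
	fmt.Println(grouped)
}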
95
pkg/zabbix/models.go
Normal file
@@ -0,0 +1,95 @@
package zabbix

import (
	"encoding/json"
	"time"
)

type ZabbixDatasourceSettingsDTO struct {
	Trends      bool   `json:"trends"`
	TrendsFrom  string `json:"trendsFrom"`
	TrendsRange string `json:"trendsRange"`
	CacheTTL    string `json:"cacheTTL"`
	Timeout     string `json:"timeout"`

	DisableReadOnlyUsersAck bool `json:"disableReadOnlyUsersAck"`
}

type ZabbixDatasourceSettings struct {
	Trends      bool
	TrendsFrom  time.Duration
	TrendsRange time.Duration
	CacheTTL    time.Duration
	Timeout     time.Duration

	DisableReadOnlyUsersAck bool `json:"disableReadOnlyUsersAck"`
}

type ZabbixAPIParams = map[string]interface{}

type ZabbixAPIRequest struct {
	Method string          `json:"method"`
	Params ZabbixAPIParams `json:"params,omitempty"`
}

func (r *ZabbixAPIRequest) String() string {
	jsonRequest, _ := json.Marshal(r.Params)
	return r.Method + string(jsonRequest)
}

type Items []Item

type Item struct {
	ID         string     `json:"itemid,omitempty"`
	Key        string     `json:"key_,omitempty"`
	Name       string     `json:"name,omitempty"`
	ValueType  int        `json:"value_type,omitempty,string"`
	HostID     string     `json:"hostid,omitempty"`
	Hosts      []ItemHost `json:"hosts,omitempty"`
	Status     string     `json:"status,omitempty"`
	State      string     `json:"state,omitempty"`
	Delay      string     `json:"delay,omitempty"`
	Units      string     `json:"units,omitempty"`
	ValueMapID string     `json:"valuemapid,omitempty"`
}

type ItemHost struct {
	ID   string `json:"hostid,omitempty"`
	Name string `json:"name,omitempty"`
}

type Trend []TrendPoint

type TrendPoint struct {
	ItemID   string `json:"itemid,omitempty"`
	Clock    int64  `json:"clock,omitempty,string"`
	Num      string `json:"num,omitempty"`
	ValueMin string `json:"value_min,omitempty"`
	ValueAvg string `json:"value_avg,omitempty"`
	ValueMax string `json:"value_max,omitempty"`
}

type History []HistoryPoint

type HistoryPoint struct {
	ItemID string  `json:"itemid,omitempty"`
	Clock  int64   `json:"clock,omitempty,string"`
	Value  float64 `json:"value,omitempty,string"`
	NS     int64   `json:"ns,omitempty,string"`
}

type Group struct {
	Name string `json:"name"`
	ID   string `json:"groupid"`
}

type Host struct {
	Name string `json:"name"`
	Host string `json:"host"`
	ID   string `json:"hostid"`
}

type Application struct {
	Name string `json:"name"`
	ID   string `json:"applicationid"`
}
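One detail these models rely on: the Zabbix API returns numeric fields as JSON strings, which the structs absorb with the `,string` tag option. A quick stdlib demonstration:

package main

import (
	"encoding/json"
	"fmt"
)

type point struct {
	Clock int64   `json:"clock,string"`
	Value float64 `json:"value,string"`
}

func main() {
	var p point
	// Zabbix encodes numbers as strings; the `,string` option decodes them transparently.
	_ = json.Unmarshal([]byte(`{"clock":"1627040700","value":"0.42"}`), &p)
	fmt.Println(p.Clock, p.Value) // 1627040700 0.42
}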
64
pkg/zabbix/settings.go
Normal file
@@ -0,0 +1,64 @@
package zabbix

import (
	"encoding/json"
	"errors"
	"strconv"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/gtime"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func readZabbixSettings(dsInstanceSettings *backend.DataSourceInstanceSettings) (*ZabbixDatasourceSettings, error) {
	zabbixSettingsDTO := &ZabbixDatasourceSettingsDTO{}

	err := json.Unmarshal(dsInstanceSettings.JSONData, &zabbixSettingsDTO)
	if err != nil {
		return nil, err
	}

	if zabbixSettingsDTO.TrendsFrom == "" {
		zabbixSettingsDTO.TrendsFrom = "7d"
	}
	if zabbixSettingsDTO.TrendsRange == "" {
		zabbixSettingsDTO.TrendsRange = "4d"
	}
	if zabbixSettingsDTO.CacheTTL == "" {
		zabbixSettingsDTO.CacheTTL = "1h"
	}

	if zabbixSettingsDTO.Timeout == "" {
		zabbixSettingsDTO.Timeout = "30"
	}

	trendsFrom, err := gtime.ParseInterval(zabbixSettingsDTO.TrendsFrom)
	if err != nil {
		return nil, err
	}

	trendsRange, err := gtime.ParseInterval(zabbixSettingsDTO.TrendsRange)
	if err != nil {
		return nil, err
	}

	cacheTTL, err := gtime.ParseInterval(zabbixSettingsDTO.CacheTTL)
	if err != nil {
		return nil, err
	}

	timeout, err := strconv.Atoi(zabbixSettingsDTO.Timeout)
	if err != nil {
		return nil, errors.New("failed to parse timeout: " + err.Error())
	}

	zabbixSettings := &ZabbixDatasourceSettings{
		Trends:      zabbixSettingsDTO.Trends,
		TrendsFrom:  trendsFrom,
		TrendsRange: trendsRange,
		CacheTTL:    cacheTTL,
		Timeout:     time.Duration(timeout) * time.Second,
	}

	return zabbixSettings, nil
}
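A gotcha encoded above: `timeout` is a bare number of seconds parsed with `strconv.Atoi`, unlike `trendsFrom`, `trendsRange` and `cacheTTL`, which take interval strings like "7d" or "1h". A tiny illustration:

package main

import (
	"fmt"
	"strconv"
	"time"
)

func main() {
	timeout, err := strconv.Atoi("30") // "30s" would fail here, unlike the interval fields
	if err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(timeout) * time.Second) // 30s
}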
31
pkg/zabbix/testing.go
Normal file
@@ -0,0 +1,31 @@
package zabbix

import (
	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func MockZabbixClient(dsInfo *backend.DataSourceInstanceSettings, body string, statusCode int) (*Zabbix, error) {
	zabbixAPI, err := zabbixapi.MockZabbixAPI(body, statusCode)
	if err != nil {
		return nil, err
	}

	client, err := New(dsInfo, zabbixAPI)
	if err != nil {
		return nil, err
	}

	return client, nil
}

func MockZabbixClientResponse(client *Zabbix, body string, statusCode int) (*Zabbix, error) {
	zabbixAPI, err := zabbixapi.MockZabbixAPI(body, statusCode)
	if err != nil {
		return nil, err
	}

	client.api = zabbixAPI

	return client, nil
}
21
pkg/zabbix/type_converters.go
Normal file
@@ -0,0 +1,21 @@
package zabbix

import (
	"encoding/json"

	"github.com/bitly/go-simplejson"
)

func convertTo(value *simplejson.Json, result interface{}) error {
	valueJSON, err := value.MarshalJSON()
	if err != nil {
		return err
	}

	err = json.Unmarshal(valueJSON, result)
	if err != nil {
		return err
	}

	return nil
}
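`convertTo` is just a marshal/unmarshal round-trip out of go-simplejson into typed structs; the same idea shown standalone (the `host` struct here is illustrative):

package main

import (
	"encoding/json"
	"fmt"

	simplejson "github.com/bitly/go-simplejson"
)

type host struct {
	Name string `json:"name"`
	ID   string `json:"hostid"`
}

func main() {
	raw, _ := simplejson.NewJson([]byte(`[{"name":"web01","hostid":"10084"}]`))
	// Same idea as convertTo: re-marshal the simplejson value, then unmarshal into typed structs.
	b, _ := raw.MarshalJSON()
	var hosts []host
	_ = json.Unmarshal(b, &hosts)
	fmt.Println(hosts) // [{web01 10084}]
}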
87
pkg/zabbix/utils.go
Normal file
@@ -0,0 +1,87 @@
package zabbix

import (
	"fmt"
	"regexp"
	"strings"
)

func (item *Item) ExpandItemName() string {
	name := item.Name
	key := item.Key

	if !strings.Contains(key, "[") {
		return name
	}

	keyRunes := []rune(item.Key)
	keyParamsStr := string(keyRunes[strings.Index(key, "[")+1 : strings.LastIndex(key, "]")])
	keyParams := splitKeyParams(keyParamsStr)

	for i := len(keyParams); i >= 1; i-- {
		name = strings.ReplaceAll(name, fmt.Sprintf("$%v", i), keyParams[i-1])
	}

	return name
}

func expandItems(items []*Item) []*Item {
	for i := 0; i < len(items); i++ {
		items[i].Name = items[i].ExpandItemName()
	}
	return items
}

func splitKeyParams(paramStr string) []string {
	paramRunes := []rune(paramStr)
	params := []string{}
	quoted := false
	inArray := false
	splitSymbol := ","
	param := ""

	for _, r := range paramRunes {
		symbol := string(r)
		if symbol == `"` && inArray {
			param += symbol
		} else if symbol == `"` && quoted {
			quoted = false
		} else if symbol == `"` && !quoted {
			quoted = true
		} else if symbol == "[" && !quoted {
			inArray = true
		} else if symbol == "]" && !quoted {
			inArray = false
		} else if symbol == splitSymbol && !quoted && !inArray {
			params = append(params, param)
			param = ""
		} else {
			param += symbol
		}
	}

	params = append(params, param)
	return params
}

func parseFilter(filter string) (*regexp.Regexp, error) {
	regex := regexp.MustCompile(`^/(.+)/(.*)$`)
	flagRE := regexp.MustCompile("[imsU]+")

	matches := regex.FindStringSubmatch(filter)
	if len(matches) <= 1 {
		return nil, nil
	}

	pattern := ""
	if matches[2] != "" {
		if flagRE.MatchString(matches[2]) {
			pattern += "(?" + matches[2] + ")"
		} else {
			return nil, fmt.Errorf("error parsing regexp: unsupported flags `%s` (expected [imsU])", matches[2])
		}
	}
	pattern += matches[1]

	return regexp.Compile(pattern)
}
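To make the filter contract concrete: plain strings are matched by exact equality in the filter helpers, while `/pattern/flags` compiles to a Go regexp with inline flags. A stdlib sketch mirroring parseFilter's regex branch (input values are illustrative):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	filter := "/^cpu.*/i"
	re := regexp.MustCompile(`^/(.+)/(.*)$`)
	m := re.FindStringSubmatch(filter)
	// m[1] is the pattern, m[2] the flags; inline flags become a (?i)-style prefix.
	compiled := regexp.MustCompile("(?" + m[2] + ")" + m[1])
	fmt.Println(compiled.MatchString("CPU utilization")) // true: (?i) makes it case-insensitive
}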
135
pkg/zabbix/zabbix.go
Normal file
@@ -0,0 +1,135 @@
package zabbix

import (
	"context"
	"strings"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/zabbixapi"
	"github.com/bitly/go-simplejson"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)

// Zabbix is a wrapper for the Zabbix API. It wraps Zabbix API queries, performs authentication, and adds caching,
// deduplication and other performance optimizations.
type Zabbix struct {
	api    *zabbixapi.ZabbixAPI
	dsInfo *backend.DataSourceInstanceSettings
	cache  *ZabbixCache
	logger log.Logger
}

// New returns a new instance of the Zabbix client.
func New(dsInfo *backend.DataSourceInstanceSettings, zabbixAPI *zabbixapi.ZabbixAPI) (*Zabbix, error) {
	logger := log.New()

	zabbixSettings, err := readZabbixSettings(dsInfo)
	if err != nil {
		logger.Error("Error parsing Zabbix settings", "error", err)
		return nil, err
	}

	zabbixCache := NewZabbixCache(zabbixSettings.CacheTTL, 10*time.Minute)

	return &Zabbix{
		api:    zabbixAPI,
		dsInfo: dsInfo,
		cache:  zabbixCache,
		logger: logger,
	}, nil
}

func (zabbix *Zabbix) GetAPI() *zabbixapi.ZabbixAPI {
	return zabbix.api
}

// Request wraps the API request with caching
func (ds *Zabbix) Request(ctx context.Context, apiReq *ZabbixAPIRequest) (*simplejson.Json, error) {
	var resultJson *simplejson.Json
	var err error

	cachedResult, queryExistInCache := ds.cache.GetAPIRequest(apiReq)
	if !queryExistInCache {
		resultJson, err = ds.request(ctx, apiReq.Method, apiReq.Params)
		if err != nil {
			return nil, err
		}

		if IsCachedRequest(apiReq.Method) {
			ds.logger.Debug("Writing result to cache", "method", apiReq.Method)
			ds.cache.SetAPIRequest(apiReq, resultJson)
		}
	} else {
		var ok bool
		resultJson, ok = cachedResult.(*simplejson.Json)
		if !ok {
			resultJson = simplejson.New()
		}
	}

	return resultJson, nil
}

// request checks authentication and makes a request to the Zabbix API.
func (zabbix *Zabbix) request(ctx context.Context, method string, params ZabbixAPIParams) (*simplejson.Json, error) {
	zabbix.logger.Debug("Zabbix request", "method", method)

	// Skip authentication for methods that don't require it
	if method == "apiinfo.version" {
		return zabbix.api.RequestUnauthenticated(ctx, method, params)
	}

	result, err := zabbix.api.Request(ctx, method, params)
	notAuthorized := isNotAuthorized(err)
	if err == zabbixapi.ErrNotAuthenticated || notAuthorized {
		if notAuthorized {
			zabbix.logger.Debug("Authentication token expired, performing re-login")
		}
		err = zabbix.Login(ctx)
		if err != nil {
			return nil, err
		}
		return zabbix.request(ctx, method, params)
	} else if err != nil {
		return nil, err
	}

	return result, err
}

func (zabbix *Zabbix) Login(ctx context.Context) error {
	jsonData, err := simplejson.NewJson(zabbix.dsInfo.JSONData)
	if err != nil {
		return err
	}

	zabbixLogin := jsonData.Get("username").MustString()
	var zabbixPassword string
	if securePassword, exists := zabbix.dsInfo.DecryptedSecureJSONData["password"]; exists {
		zabbixPassword = securePassword
	} else {
		// Fallback
		zabbixPassword = jsonData.Get("password").MustString()
	}

	err = zabbix.api.Authenticate(ctx, zabbixLogin, zabbixPassword)
	if err != nil {
		zabbix.logger.Error("Zabbix authentication error", "error", err)
		return err
	}
	zabbix.logger.Debug("Successfully authenticated", "url", zabbix.api.GetUrl().String(), "user", zabbixLogin)

	return nil
}

func isNotAuthorized(err error) bool {
	if err == nil {
		return false
	}

	message := err.Error()
	return strings.Contains(message, "Session terminated, re-login, please.") ||
		strings.Contains(message, "Not authorised.") ||
		strings.Contains(message, "Not authorized.")
}
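The re-login flow in `request` is the usual expired-token retry pattern; stripped to a skeleton with hypothetical `call`/`login` stand-ins (not the plugin's API):

package main

import (
	"errors"
	"fmt"
)

var errNotAuthorized = errors.New("Not authorized.")

func call(authed *bool) (string, error) {
	if !*authed {
		return "", errNotAuthorized
	}
	return "ok", nil
}

func login(authed *bool) error { *authed = true; return nil }

// request retries once after re-authenticating, mirroring the structure above.
func request(authed *bool) (string, error) {
	result, err := call(authed)
	if errors.Is(err, errNotAuthorized) {
		if err := login(authed); err != nil {
			return "", err
		}
		return request(authed)
	}
	return result, err
}

func main() {
	authed := false
	fmt.Println(request(&authed)) // ok <nil>
}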
89
pkg/zabbix/zabbix_test.go
Normal file
@@ -0,0 +1,89 @@
package zabbix

import (
	"context"
	"testing"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/stretchr/testify/assert"
)

var basicDatasourceInfo = &backend.DataSourceInstanceSettings{
	ID:       1,
	Name:     "TestDatasource",
	URL:      "http://zabbix.org/zabbix",
	JSONData: []byte(`{"username":"username", "password":"password", "cacheTTL":"10m"}`),
}

var emptyParams = map[string]interface{}{}

func TestLogin(t *testing.T) {
	zabbixClient, _ := MockZabbixClient(basicDatasourceInfo, `{"result":"secretauth"}`, 200)
	err := zabbixClient.Login(context.Background())

	assert.NoError(t, err)
	assert.Equal(t, "secretauth", zabbixClient.api.GetAuth())
}

func TestLoginError(t *testing.T) {
	zabbixClient, _ := MockZabbixClient(basicDatasourceInfo, `{"result":""}`, 500)
	err := zabbixClient.Login(context.Background())

	assert.Error(t, err)
	assert.Equal(t, "", zabbixClient.api.GetAuth())
}

func TestZabbixAPIQuery(t *testing.T) {
	zabbixClient, _ := MockZabbixClient(basicDatasourceInfo, `{"result":"test"}`, 200)
	resp, err := zabbixClient.Request(context.Background(), &ZabbixAPIRequest{Method: "test.get", Params: emptyParams})

	assert.NoError(t, err)

	result, err := resp.String()
	assert.NoError(t, err)
	assert.Equal(t, "test", result)
}

func TestCachedQuery(t *testing.T) {
	// Using a method with caching enabled
	query := &ZabbixAPIRequest{Method: "host.get", Params: emptyParams}
	zabbixClient, _ := MockZabbixClient(basicDatasourceInfo, `{"result":"testOld"}`, 200)

	// Run the query a first time
	resp, err := zabbixClient.Request(context.Background(), query)

	assert.NoError(t, err)
	result, _ := resp.String()
	assert.Equal(t, "testOld", result)

	// Mock the request with a new value
	zabbixClient, _ = MockZabbixClientResponse(zabbixClient, `{"result":"testNew"}`, 200)
	// Should not run an actual API query and should return the first result
	resp, err = zabbixClient.Request(context.Background(), query)

	assert.NoError(t, err)
	result, _ = resp.String()
	assert.Equal(t, "testOld", result)
}

func TestNonCachedQuery(t *testing.T) {
	// Using a method with caching disabled
	query := &ZabbixAPIRequest{Method: "history.get", Params: emptyParams}
	zabbixClient, _ := MockZabbixClient(basicDatasourceInfo, `{"result":"testOld"}`, 200)

	// Run the query a first time
	resp, err := zabbixClient.Request(context.Background(), query)

	assert.NoError(t, err)
	result, _ := resp.String()
	assert.Equal(t, "testOld", result)

	// Mock the request with a new value
	zabbixClient, _ = MockZabbixClientResponse(zabbixClient, `{"result":"testNew"}`, 200)
	// Should run an actual API query and return the new result
	resp, err = zabbixClient.Request(context.Background(), query)

	assert.NoError(t, err)
	result, _ = resp.String()
	assert.Equal(t, "testNew", result)
}
@@ -9,11 +9,8 @@ import (
	"io/ioutil"
	"net/http"
	"net/url"
	"time"

	"github.com/alexanderzobnin/grafana-zabbix/pkg/httpclient"
	"github.com/bitly/go-simplejson"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
	"golang.org/x/net/context/ctxhttp"
)
@@ -22,6 +19,7 @@ var (
	ErrNotAuthenticated = errors.New("zabbix api: not authenticated")
)

// ZabbixAPI is a simple client responsible for making requests to the Zabbix API
type ZabbixAPI struct {
	url        *url.URL
	httpClient *http.Client
@@ -32,14 +30,9 @@ type ZabbixAPI struct {
type ZabbixAPIParams = map[string]interface{}

// New returns a new ZabbixAPI instance initialized with the given URL, or an error.
func New(dsInfo *backend.DataSourceInstanceSettings, timeout time.Duration) (*ZabbixAPI, error) {
func New(apiURL string, client *http.Client) (*ZabbixAPI, error) {
	apiLogger := log.New()
	zabbixURL, err := url.Parse(dsInfo.URL)
	if err != nil {
		return nil, err
	}

	client, err := httpclient.GetHttpClient(dsInfo, timeout)
	zabbixURL, err := url.Parse(apiURL)
	if err != nil {
		return nil, err
	}
@@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { cx, css } from 'emotion';
import { cx, css } from '@emotion/css';
import { stylesFactory, useTheme } from '@grafana/ui';
import { GrafanaTheme, GrafanaThemeType } from '@grafana/data';
import { FAIcon } from '../FAIcon/FAIcon';
@@ -1,7 +1,7 @@
import React from 'react';
import { config, GrafanaBootConfig } from '@grafana/runtime';
import { ThemeContext, getTheme } from '@grafana/ui';
import { GrafanaThemeType } from '@grafana/data';
import { ThemeContext } from '@grafana/ui';
import { createTheme } from '@grafana/data';

export const ConfigContext = React.createContext<GrafanaBootConfig>(config);
export const ConfigConsumer = ConfigContext.Consumer;
@@ -14,10 +14,11 @@ export const provideConfig = (component: React.ComponentType<any>) => {
  return ConfigProvider;
};

export const getCurrentThemeName = () =>
  config.bootData.user.lightTheme ? GrafanaThemeType.Light : GrafanaThemeType.Dark;

export const getCurrentTheme = () => getTheme(getCurrentThemeName());
export const getCurrentTheme = () => createTheme({
  colors: {
    mode: config.bootData.user.lightTheme ? 'light' : 'dark',
  },
});

export const ThemeProvider = ({ children }: { children: React.ReactNode }) => {
  return (
@@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { cx } from 'emotion';
import { cx } from '@emotion/css';

interface Props {
  icon: string;
@@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { cx } from 'emotion';
import { cx } from '@emotion/css';

interface Props {
  status: 'critical' | 'warning' | 'online' | 'ok' | 'problem';
@@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { cx, css } from 'emotion';
import { cx, css } from '@emotion/css';
import { Manager, Popper as ReactPopper, Reference } from 'react-popper';
import Transition from 'react-transition-group/Transition';
import { stylesFactory } from '@grafana/ui';
@@ -149,6 +149,7 @@ export class ZabbixVariableQueryEditor extends PureComponent<VariableQueryProps,
        <div className="gf-form">
          <InlineFormLabel width={10} tooltip="Original query string, read-only">Legacy Query</InlineFormLabel>
          <Input
            css=""
            value={legacyQuery}
            readOnly={true}
          />
@@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { css, cx } from 'emotion';
import { css, cx } from '@emotion/css';
import { EventsWithValidation, ValidationEvents, useTheme } from '@grafana/ui';
import { GrafanaTheme } from '@grafana/data';
import { isRegex, variableRegex } from '../utils';
@@ -7,12 +7,12 @@ export const DATAPOINT_VALUE = 0;
export const DATAPOINT_TS = 1;

// Editor modes
export const MODE_METRICS = 0;
export const MODE_ITSERVICE = 1;
export const MODE_TEXT = 2;
export const MODE_ITEMID = 3;
export const MODE_TRIGGERS = 4;
export const MODE_PROBLEMS = 5;
export const MODE_METRICS = '0';
export const MODE_ITSERVICE = '1';
export const MODE_TEXT = '2';
export const MODE_ITEMID = '3';
export const MODE_TRIGGERS = '4';
export const MODE_PROBLEMS = '5';

// Triggers severity
export const SEV_NOT_CLASSIFIED = 0;
@@ -1,143 +1,89 @@
import _ from 'lodash';
// Available in 7.0
// import { getTemplateSrv } from '@grafana/runtime';
import * as utils from './utils';
import ts, { groupBy_perf as groupBy } from './timeseries';
import { getTemplateSrv } from '@grafana/runtime';
import { DataFrame, FieldType, TIME_SERIES_VALUE_FIELD_NAME } from '@grafana/data';

const SUM = ts.SUM;
const COUNT = ts.COUNT;
const AVERAGE = ts.AVERAGE;
const MIN = ts.MIN;
const MAX = ts.MAX;
const MEDIAN = ts.MEDIAN;
const PERCENTILE = ts.PERCENTILE;

const downsampleSeries = ts.downsample;
const groupBy_exported = (interval, groupFunc, datapoints) => groupBy(datapoints, interval, groupFunc);
const sumSeries = ts.sumSeries;
const delta = ts.delta;
const rate = ts.rate;
const scale = (factor, datapoints) => ts.scale_perf(datapoints, factor);
const offset = (delta, datapoints) => ts.offset(datapoints, delta);
const simpleMovingAverage = (n, datapoints) => ts.simpleMovingAverage(datapoints, n);
const expMovingAverage = (a, datapoints) => ts.expMovingAverage(datapoints, a);
const percentile = (interval, n, datapoints) => groupBy(datapoints, interval, _.partial(PERCENTILE, n));

function limit(order, n, orderByFunc, timeseries) {
  const orderByCallback = aggregationFunctions[orderByFunc];
  const sortByIteratee = (ts) => {
    const values = _.map(ts.datapoints, (point) => {
      return point[0];
    });
    return orderByCallback(values);
  };
  const sortedTimeseries = _.sortBy(timeseries, sortByIteratee);
  if (order === 'bottom') {
    return sortedTimeseries.slice(0, n);
  } else {
    return sortedTimeseries.slice(-n);
function setAlias(alias: string, frame: DataFrame) {
  if (frame.fields?.length <= 2) {
    const valueField = frame.fields.find(f => f.name === TIME_SERIES_VALUE_FIELD_NAME);
    if (valueField?.config?.custom?.scopedVars) {
      alias = getTemplateSrv().replace(alias, valueField?.config?.custom?.scopedVars);
    }
    frame.name = alias;
    return frame;
  }
}

function removeAboveValue(n, datapoints) {
  return _.map(datapoints, point => {
    return [
      (point[0] > n) ? null : point[0],
      point[1]
    ];
  });
}

function removeBelowValue(n, datapoints) {
  return _.map(datapoints, point => {
    return [
      (point[0] < n) ? null : point[0],
      point[1]
    ];
  });
}

function transformNull(n, datapoints) {
  return _.map(datapoints, point => {
    return [
      (point[0] !== null) ? point[0] : n,
      point[1]
    ];
  });
}

function sortSeries(direction, timeseries: any[]) {
  return _.orderBy(timeseries, [ts => {
    return ts.target.toLowerCase();
  }], direction);
}

function setAlias(alias, timeseries) {
  // TODO: use getTemplateSrv() when available (since 7.0)
  if (this.templateSrv && timeseries && timeseries.scopedVars) {
    alias = this.templateSrv.replace(alias, timeseries.scopedVars);
  for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
    const field = frame.fields[fieldIndex];
    if (field.type !== FieldType.time) {
      if (field?.config?.custom?.scopedVars) {
        alias = getTemplateSrv().replace(alias, field?.config?.custom?.scopedVars);
      }
      field.name = alias;
    }
  }
  timeseries.target = alias;
  return timeseries;
  return frame;
}

function replaceAlias(regexp, newAlias, timeseries) {
  let pattern;
function replaceAlias(regexp: string, newAlias: string, frame: DataFrame) {
  let pattern: string | RegExp;
  if (utils.isRegex(regexp)) {
    pattern = utils.buildRegex(regexp);
  } else {
    pattern = regexp;
  }

  let alias = timeseries.target.replace(pattern, newAlias);
  // TODO: use getTemplateSrv() when available (since 7.0)
  if (this.templateSrv && timeseries && timeseries.scopedVars) {
    alias = this.templateSrv.replace(alias, timeseries.scopedVars);
  if (frame.fields?.length <= 2) {
    let alias = frame.name.replace(pattern, newAlias);
    const valueField = frame.fields.find(f => f.name === TIME_SERIES_VALUE_FIELD_NAME);
    if (valueField?.state?.scopedVars) {
      alias = getTemplateSrv().replace(alias, valueField?.state?.scopedVars);
    }
    frame.name = alias;
    return frame;
  }
  timeseries.target = alias;
  return timeseries;

  for (const field of frame.fields) {
    if (field.type !== FieldType.time) {
      let alias = field.name.replace(pattern, newAlias);
      if (field?.state?.scopedVars) {
        alias = getTemplateSrv().replace(alias, field?.state?.scopedVars);
      }
      field.name = alias;
    }
  }
  return frame;
}

function setAliasByRegex(alias, timeseries) {
  timeseries.target = extractText(timeseries.target, alias);
  return timeseries;
function setAliasByRegex(alias: string, frame: DataFrame) {
  if (frame.fields?.length <= 2) {
    try {
      frame.name = extractText(frame.name, alias);
    } catch (error) {
      console.error('Failed to apply RegExp:', error?.message || error);
    }
    return frame;
  }

  for (const field of frame.fields) {
    if (field.type !== FieldType.time) {
      try {
        field.name = extractText(field.name, alias);
      } catch (error) {
        console.error('Failed to apply RegExp:', error?.message || error);
      }
    }
  }

  return frame;
}

function extractText(str, pattern) {
function extractText(str: string, pattern: string) {
  const extractPattern = new RegExp(pattern);
  const extractedValue = extractPattern.exec(str);
  return extractedValue[0];
}

function groupByWrapper(interval, groupFunc, datapoints) {
  const groupByCallback = aggregationFunctions[groupFunc];
  return groupBy(datapoints, interval, groupByCallback);
}

function aggregateByWrapper(interval, aggregateFunc, datapoints) {
  // Flatten all points in frame and then just use groupBy()
  const flattenedPoints = ts.flattenDatapoints(datapoints);
  // groupBy_perf works with sorted series only
  const sortedPoints = ts.sortByTime(flattenedPoints);
  const groupByCallback = aggregationFunctions[aggregateFunc];
  return groupBy(sortedPoints, interval, groupByCallback);
}

function aggregateWrapper(groupByCallback, interval, datapoints) {
  const flattenedPoints = ts.flattenDatapoints(datapoints);
  // groupBy_perf works with sorted series only
  const sortedPoints = ts.sortByTime(flattenedPoints);
  return groupBy(sortedPoints, interval, groupByCallback);
}

function percentileAgg(interval, n, datapoints) {
  const flattenedPoints = ts.flattenDatapoints(datapoints);
  // groupBy_perf works with sorted series only
  const sortedPoints = ts.sortByTime(flattenedPoints);
  const groupByCallback = _.partial(PERCENTILE, n);
  return groupBy(sortedPoints, interval, groupByCallback);
}

function timeShift(interval, range) {
  const shift = utils.parseTimeShiftInterval(interval) / 1000;
  return _.map(range, time => {
@@ -145,71 +91,14 @@ function timeShift(interval, range) {
  });
}

function unShiftTimeSeries(interval, datapoints) {
  const unshift = utils.parseTimeShiftInterval(interval);
  return _.map(datapoints, dp => {
    return [
      dp[0],
      dp[1] + unshift
    ];
  });
}

const metricFunctions = {
  groupBy: groupByWrapper,
  scale: scale,
  offset: offset,
  delta: delta,
  rate: rate,
  movingAverage: simpleMovingAverage,
  exponentialMovingAverage: expMovingAverage,
  percentile: percentile,
  transformNull: transformNull,
  aggregateBy: aggregateByWrapper,
  // Predefined aggs
  percentileAgg: percentileAgg,
  average: _.partial(aggregateWrapper, AVERAGE),
  min: _.partial(aggregateWrapper, MIN),
  max: _.partial(aggregateWrapper, MAX),
  median: _.partial(aggregateWrapper, MEDIAN),
  sum: _.partial(aggregateWrapper, SUM),
  count: _.partial(aggregateWrapper, COUNT),
  sumSeries: sumSeries,
  removeAboveValue: removeAboveValue,
  removeBelowValue: removeBelowValue,
  top: _.partial(limit, 'top'),
  bottom: _.partial(limit, 'bottom'),
  sortSeries: sortSeries,
  timeShift: timeShift,
  setAlias: setAlias,
  setAliasByRegex: setAliasByRegex,
  replaceAlias: replaceAlias
};

const aggregationFunctions = {
  avg: AVERAGE,
  min: MIN,
  max: MAX,
  median: MEDIAN,
  sum: SUM,
  count: COUNT
};

export default {
  downsampleSeries: downsampleSeries,
  groupBy: groupBy_exported,
  AVERAGE: AVERAGE,
  MIN: MIN,
  MAX: MAX,
  MEDIAN: MEDIAN,
  SUM: SUM,
  COUNT: COUNT,
  unShiftTimeSeries: unShiftTimeSeries,

  get aggregationFunctions() {
    return aggregationFunctions;
  },

  get metricFunctions() {
    return metricFunctions;
  }
@@ -1,4 +1,5 @@
import _ from 'lodash';
import { Observable } from 'rxjs';
import config from 'grafana/app/core/config';
import { contextSrv } from 'grafana/app/core/core';
import * as dateMath from 'grafana/app/core/utils/datemath';
@@ -6,15 +7,24 @@ import * as utils from './utils';
import * as migrations from './migrations';
import * as metricFunctions from './metricFunctions';
import * as c from './constants';
import { align, fillTrendsWithNulls } from './timeseries';
import dataProcessor from './dataProcessor';
import responseHandler from './responseHandler';
import problemsHandler from './problemsHandler';
import { Zabbix } from './zabbix/zabbix';
import { ZabbixAPIError } from './zabbix/connectors/zabbix_api/zabbixAPIConnector';
import { ZabbixMetricsQuery, ZabbixDSOptions, VariableQueryTypes, ShowProblemTypes, ProblemDTO } from './types';
import { getBackendSrv, getTemplateSrv } from '@grafana/runtime';
import { DataFrame, DataQueryRequest, DataQueryResponse, DataSourceApi, DataSourceInstanceSettings, FieldType, isDataFrame, LoadingState } from '@grafana/data';
import { ProblemDTO, ShowProblemTypes, VariableQueryTypes, ZabbixDSOptions, ZabbixMetricsQuery } from './types';
import { BackendSrvRequest, getBackendSrv, getTemplateSrv, toDataQueryResponse } from '@grafana/runtime';
import {
  DataFrame,
  dataFrameFromJSON,
  DataQueryRequest,
  DataQueryResponse,
  DataSourceApi,
  DataSourceInstanceSettings,
  FieldType,
  isDataFrame,
  LoadingState
} from '@grafana/data';

export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDSOptions> {
  name: string;
@@ -48,17 +58,17 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
    this.replaceTemplateVars = _.partial(replaceTemplateVars, this.templateSrv);

    // General data source settings
    this.datasourceId = instanceSettings.id;
    this.name = instanceSettings.name;
    this.basicAuth = instanceSettings.basicAuth;
    this.withCredentials = instanceSettings.withCredentials;

    const jsonData = migrations.migrateDSConfig(instanceSettings.jsonData);

    // Use trends instead of history since the specified time
    this.trends = jsonData.trends;
    this.trendsFrom = jsonData.trendsFrom || '7d';
    this.trendsRange = jsonData.trendsRange || '4d';

    // Set cache update interval
    const ttl = jsonData.cacheTTL || '1h';
@@ -94,70 +104,128 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
  /**
   * Query panel data. Called for each panel in the dashboard.
   * @param {Object} options Contains time range, targets and other info.
   * @param {Object} request Contains time range, targets and other info.
   * @return {Object} Grafana metrics object with timeseries data for each target.
   */
  query(options: DataQueryRequest<any>): Promise<DataQueryResponse> {
    // Create request for each target
    const promises = _.map(options.targets, t => {
  query(request: DataQueryRequest<any>): Promise<DataQueryResponse> | Observable<DataQueryResponse> {
    // Migrate old targets
    const requestTargets = request.targets.map(t => {
      // Prevent changes of the original object
      const target = _.cloneDeep(t);
      return migrations.migrate(target);
    });

    const backendResponsePromise = this.backendQuery({ ...request, targets: requestTargets });
    const dbConnectionResponsePromise = this.dbConnectionQuery({ ...request, targets: requestTargets });
    const frontendResponsePromise = this.frontendQuery({ ...request, targets: requestTargets });

    return Promise.all([backendResponsePromise, dbConnectionResponsePromise, frontendResponsePromise])
      .then(rsp => {
        // Merge backend and frontend query results
        const [backendRes, dbConnectionRes, frontendRes] = rsp;
        if (dbConnectionRes.data) {
          backendRes.data = backendRes.data.concat(dbConnectionRes.data);
        }
        if (frontendRes.data) {
          backendRes.data = backendRes.data.concat(frontendRes.data);
        }

        return {
          data: backendRes.data,
          state: LoadingState.Done,
          key: request.requestId,
        };
      });
  }

  async backendQuery(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
    const { intervalMs, maxDataPoints, range, requestId } = request;
    const targets = request.targets.filter(this.isBackendTarget);

    // Add range variables
    request.scopedVars = Object.assign({}, request.scopedVars, utils.getRangeScopedVars(request.range));

    const queries = _.compact(targets.map((query) => {
      // Don't request hidden targets
      if (t.hide) {
      if (query.hide) {
        return null;
      }

      this.replaceTargetVariables(query, request);

      return {
        ...query,
        datasourceId: this.datasourceId,
        intervalMs,
        maxDataPoints,
      };
    }));

    // Return early if no queries exist
    if (!queries.length) {
      return Promise.resolve({ data: [] });
    }

    const body: any = { queries };

    if (range) {
      body.range = range;
      body.from = range.from.valueOf().toString();
      body.to = range.to.valueOf().toString();
    }

    let rsp: any;
    try {
      rsp = await getBackendSrv().fetch({
        url: '/api/ds/query',
        method: 'POST',
        data: body,
        requestId,
      }).toPromise();
    } catch (err) {
      return toDataQueryResponse(err);
    }

    const resp = toDataQueryResponse(rsp);
    this.sortByRefId(resp);
    this.applyFrontendFunctions(resp, request);
    if (responseHandler.isConvertibleToWide(resp.data)) {
      console.log('Converting response to the wide format');
      resp.data = responseHandler.convertToWide(resp.data);
    }

    return resp;
  }

  async frontendQuery(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
    const frontendTargets = request.targets.filter(t => !(this.isBackendTarget(t) || this.isDBConnectionTarget(t)));
    const promises = _.map(frontendTargets, target => {
      // Don't request hidden targets
      if (target.hide) {
        return [];
      }

      let timeFrom = Math.ceil(dateMath.parse(options.range.from) / 1000);
      let timeTo = Math.ceil(dateMath.parse(options.range.to) / 1000);

      // Add range variables
      options.scopedVars = Object.assign({}, options.scopedVars, utils.getRangeScopedVars(options.range));
      request.scopedVars = Object.assign({}, request.scopedVars, utils.getRangeScopedVars(request.range));
      this.replaceTargetVariables(target, request);
      const timeRange = this.buildTimeRange(request, target);

      // Prevent changes of the original object
      let target = _.cloneDeep(t);

      // Migrate old targets
      target = migrations.migrate(target);
      this.replaceTargetVariables(target, options);

      // Apply Time-related functions (timeShift(), etc)
      const timeFunctions = bindFunctionDefs(target.functions, 'Time');
      if (timeFunctions.length) {
        const [time_from, time_to] = utils.sequence(timeFunctions)([timeFrom, timeTo]);
        timeFrom = time_from;
        timeTo = time_to;
      }
      const timeRange = [timeFrom, timeTo];

      const useTrends = this.isUseTrends(timeRange);

      // Metrics or Text query
      if (!target.queryType || target.queryType === c.MODE_METRICS || target.queryType === c.MODE_TEXT) {
        if (target.queryType === c.MODE_TEXT) {
          // Text query
          // Don't request undefined targets
          if (!target.group || !target.host || !target.item) {
            return [];
          }

        if (!target.queryType || target.queryType === c.MODE_METRICS) {
          return this.queryNumericData(target, timeRange, useTrends, options);
        } else if (target.queryType === c.MODE_TEXT) {
          return this.queryTextData(target, timeRange);
        } else {
          return [];
        }
      } else if (target.queryType === c.MODE_ITEMID) {
        // Item ID query
        if (!target.itemids) {
          return [];
        }
        return this.queryItemIdData(target, timeRange, useTrends, options);
        return this.queryTextData(target, timeRange);
      } else if (target.queryType === c.MODE_ITSERVICE) {
        // IT services query
        return this.queryITServiceData(target, timeRange, options);
        return this.queryITServiceData(target, timeRange, request);
      } else if (target.queryType === c.MODE_TRIGGERS) {
        // Triggers query
        return this.queryTriggersData(target, timeRange);
      } else if (target.queryType === c.MODE_PROBLEMS) {
        // Problems query
        return this.queryProblems(target, timeRange, options);
        return this.queryProblems(target, timeRange, request);
      } else {
        return [];
      }
@@ -165,65 +233,72 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
      // Data for panel (all targets)
      return Promise.all(_.flatten(promises))
        .then(_.flatten)
        .then(data => {
          if (data && data.length > 0 && isDataFrame(data[0]) && !utils.isProblemsDataFrame(data[0])) {
            data = responseHandler.alignFrames(data);
            if (responseHandler.isConvertibleToWide(data)) {
              console.log('Converting response to the wide format');
              data = responseHandler.convertToWide(data);
            }
      .then(_.flatten)
      .then(data => {
        if (data && data.length > 0 && isDataFrame(data[0]) && !utils.isProblemsDataFrame(data[0])) {
          data = responseHandler.alignFrames(data);
          if (responseHandler.isConvertibleToWide(data)) {
            console.log('Converting response to the wide format');
            data = responseHandler.convertToWide(data);
          }
          return data;
        }).then(data => {
          return {
            data,
            state: LoadingState.Done,
            key: options.requestId,
          };
        });
      }
        return { data };
      });
  }

  doTsdbRequest(options) {
    const tsdbRequestData: any = {
      queries: options.targets.map(target => {
        target.datasourceId = this.datasourceId;
        target.queryType = 'zabbixAPI';
        return target;
      }),
    };
  async dbConnectionQuery(request: DataQueryRequest<any>): Promise<DataQueryResponse> {
    const targets = request.targets.filter(this.isDBConnectionTarget);

    if (options.range) {
      tsdbRequestData.from = options.range.from.valueOf().toString();
      tsdbRequestData.to = options.range.to.valueOf().toString();
    const queries = _.compact(targets.map((target) => {
      // Don't request hidden targets
      if (target.hide) {
        return [];
      }

      // Add range variables
      request.scopedVars = Object.assign({}, request.scopedVars, utils.getRangeScopedVars(request.range));
      this.replaceTargetVariables(target, request);
      const timeRange = this.buildTimeRange(request, target);
      const useTrends = this.isUseTrends(timeRange);

      if (!target.queryType || target.queryType === c.MODE_METRICS) {
        return this.queryNumericData(target, timeRange, useTrends, request);
      } else if (target.queryType === c.MODE_ITEMID) {
        // Item ID query
        if (!target.itemids) {
          return [];
        }
        return this.queryItemIdData(target, timeRange, useTrends, request);
      } else {
        return [];
      }
    }));

    const promises: Promise<DataQueryResponse> = Promise.all(queries)
      .then(_.flatten)
      .then(data => ({ data }));

    return promises;
  }

  buildTimeRange(request, target) {
    let timeFrom = Math.ceil(dateMath.parse(request.range.from) / 1000);
    let timeTo = Math.ceil(dateMath.parse(request.range.to) / 1000);

    // Apply Time-related functions (timeShift(), etc)
    const timeFunctions = bindFunctionDefs(target.functions, 'Time');
    if (timeFunctions.length) {
      const [time_from, time_to] = utils.sequence(timeFunctions)([timeFrom, timeTo]);
      timeFrom = time_from;
      timeTo = time_to;
    }

    return getBackendSrv().post('/api/tsdb/query', tsdbRequestData);
  }

  /**
   * @returns {Promise<TSDBResponse>}
   */
  doTSDBConnectionTest() {
    /**
     * @type {{ queries: ZabbixConnectionTestQuery[] }}
     */
    const tsdbRequestData = {
      queries: [
        {
          datasourceId: this.datasourceId,
          queryType: 'connectionTest'
        }
      ]
    };

    return getBackendSrv().post('/api/tsdb/query', tsdbRequestData);
    return [timeFrom, timeTo];
  }

  /**
   * Query target data for Metrics
   */
  async queryNumericData(target, timeRange, useTrends, options): Promise<DataFrame[]> {
  async queryNumericData(target, timeRange, useTrends, request): Promise<any> {
    const getItemOptions = {
      itemtype: 'num'
    };
@@ -231,43 +306,74 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
|
||||
const items = await this.zabbix.getItemsFromTarget(target, getItemOptions);
|
||||
|
||||
const queryStart = new Date().getTime();
|
||||
const result = await this.queryNumericDataForItems(items, target, timeRange, useTrends, options);
|
||||
const result = await this.queryNumericDataForItems(items, target, timeRange, useTrends, request);
|
||||
const queryEnd = new Date().getTime();
|
||||
|
||||
if (this.enableDebugLog) {
|
||||
console.log(`Datasource::Performance Query Time (${this.name}): ${queryEnd - queryStart}`);
|
||||
}
|
||||
|
||||
const valueMappings = await this.zabbix.getValueMappings();
|
||||
|
||||
const dataFrames = result.map(s => responseHandler.seriesToDataFrame(s, target, valueMappings));
|
||||
return dataFrames;
|
||||
return this.handleBackendPostProcessingResponse(result, request, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Query history for numeric items
|
||||
*/
|
||||
queryNumericDataForItems(items, target: ZabbixMetricsQuery, timeRange, useTrends, options) {
|
||||
let getHistoryPromise;
|
||||
options.valueType = this.getTrendValueType(target);
|
||||
options.consolidateBy = getConsolidateBy(target) || options.valueType;
|
||||
const disableDataAlignment = this.disableDataAlignment || target.options?.disableDataAlignment;
|
||||
async queryNumericDataForItems(items, target: ZabbixMetricsQuery, timeRange, useTrends, request) {
|
||||
let history;
|
||||
request.valueType = this.getTrendValueType(target);
|
||||
request.consolidateBy = getConsolidateBy(target) || request.valueType;
|
||||
|
||||
if (useTrends) {
|
||||
getHistoryPromise = this.zabbix.getTrends(items, timeRange, options)
|
||||
.then(timeseries => {
|
||||
return !disableDataAlignment ? this.fillTrendTimeSeriesWithNulls(timeseries) : timeseries;
|
||||
});
|
||||
history = await this.zabbix.getTrends(items, timeRange, request);
|
||||
} else {
|
||||
getHistoryPromise = this.zabbix.getHistoryTS(items, timeRange, options)
|
||||
.then(timeseries => {
|
||||
return !disableDataAlignment ? this.alignTimeSeriesData(timeseries) : timeseries;
|
||||
});
|
||||
history = await this.zabbix.getHistoryTS(items, timeRange, request);
|
||||
}
|
||||
|
||||
return getHistoryPromise
|
||||
.then(timeseries => this.applyDataProcessingFunctions(timeseries, target))
|
||||
.then(timeseries => downsampleSeries(timeseries, options));
|
||||
const range = {
|
||||
from: timeRange[0],
|
||||
to: timeRange[1],
|
||||
};
|
||||
return await this.invokeDataProcessingQuery(history, target, range);
|
||||
}
|
||||
|
||||
async invokeDataProcessingQuery(timeSeriesData, query, timeRange) {
|
||||
// Request backend for data processing
|
||||
const requestOptions: BackendSrvRequest = {
|
||||
url: `/api/datasources/${this.datasourceId}/resources/db-connection-post`,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
hideFromInspector: false,
|
||||
data: {
|
||||
series: timeSeriesData,
|
||||
query,
|
||||
timeRange,
|
||||
},
|
||||
};
|
||||
|
||||
const response: any = await getBackendSrv().fetch<any>(requestOptions).toPromise();
|
||||
return response.data;
|
||||
}

handleBackendPostProcessingResponse(response, request, target) {
  const frames = [];
  for (const frameJSON of response) {
    const frame = dataFrameFromJSON(frameJSON);
    frame.refId = target.refId;
    frames.push(frame);
  }

  const resp = { data: frames };
  this.sortByRefId(resp);
  this.applyFrontendFunctions(resp, request);
  if (responseHandler.isConvertibleToWide(resp.data)) {
    console.log('Converting response to the wide format');
    resp.data = responseHandler.convertToWide(resp.data);
  }

  return resp.data;
}
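The backend answers with serialized data frames, one per processed series. A hedged sketch of the consuming side, restating the loop above as a standalone helper (assumes the response is a `DataFrameJSON[]`, as the handler implies):

```typescript
import { dataFrameFromJSON, DataFrameJSON } from '@grafana/data';

// Rebuild DataFrames from the backend response and tag them with the
// originating query's refId, which the backend does not know about.
function framesFromBackendResponse(response: DataFrameJSON[], refId: string) {
  return response.map((frameJSON) => {
    const frame = dataFrameFromJSON(frameJSON);
    frame.refId = refId;
    return frame;
  });
}
```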

getTrendValueType(target) {
@@ -279,75 +385,27 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
  return trendValueFunc ? trendValueFunc.params[0] : "avg";
}

alignTimeSeriesData(timeseries: any[]) {
  for (const ts of timeseries) {
    const interval = utils.parseItemInterval(ts.scopedVars['__zbx_item_interval']?.value);
    ts.datapoints = align(ts.datapoints, interval);
  }
  return timeseries;
}

fillTrendTimeSeriesWithNulls(timeseries: any[]) {
  for (const ts of timeseries) {
    ts.datapoints = fillTrendsWithNulls(ts.datapoints);
  }
  return timeseries;
}

applyDataProcessingFunctions(timeseries_data, target) {
  const transformFunctions = bindFunctionDefs(target.functions, 'Transform');
  const aggregationFunctions = bindFunctionDefs(target.functions, 'Aggregate');
  const filterFunctions = bindFunctionDefs(target.functions, 'Filter');
  const aliasFunctions = bindFunctionDefs(target.functions, 'Alias');

  // Apply transformation functions
  timeseries_data = _.cloneDeep(_.map(timeseries_data, timeseries => {
    timeseries.datapoints = utils.sequence(transformFunctions)(timeseries.datapoints);
    return timeseries;
  }));

  // Apply filter functions
  if (filterFunctions.length) {
    timeseries_data = utils.sequence(filterFunctions)(timeseries_data);
  }

  // Apply aggregations
  if (aggregationFunctions.length) {
    let dp = _.map(timeseries_data, 'datapoints');
    dp = utils.sequence(aggregationFunctions)(dp);

    const aggFuncNames = _.map(metricFunctions.getCategories()['Aggregate'], 'name');
    const lastAgg = _.findLast(target.functions, func => {
      return _.includes(aggFuncNames, func.def.name);
    });

    timeseries_data = [{
      target: lastAgg.text,
      datapoints: dp
    }];
  }

  // Apply alias functions
  _.forEach(timeseries_data, utils.sequence(aliasFunctions).bind(this));

  // Apply Time-related functions (timeShift(), etc)
  // Find timeShift() function and get specified trend value
  this.applyTimeShiftFunction(timeseries_data, target);

  return timeseries_data;
}
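`utils.sequence()` (shown later in this diff) composes the bound function implementations left to right, so each category above is applied as a single fold over the data. A small illustration with hypothetical transform helpers:

```typescript
// sequence([f, g])(x) === g(f(x)): functions run in array order.
const double = (datapoints: number[][]) => datapoints.map(([v, t]) => [v * 2, t]);
const addOne = (datapoints: number[][]) => datapoints.map(([v, t]) => [v + 1, t]);

const pipeline = utils.sequence([double, addOne]);
pipeline([[2, 1500000000000]]); // -> [[5, 1500000000000]]
```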

applyTimeShiftFunction(timeseries_data, target) {
  // Find timeShift() function and get specified interval
  const timeShiftFunc = _.find(target.functions, (func) => {
    return func.def.name === 'timeShift';
sortByRefId(response: DataQueryResponse) {
  response.data.sort((a, b) => {
    if (a.refId < b.refId) {
      return -1;
    } else if (a.refId > b.refId) {
      return 1;
    }
    return 0;
  });
  if (timeShiftFunc) {
    const shift = timeShiftFunc.params[0];
    _.forEach(timeseries_data, (series) => {
      series.datapoints = dataProcessor.unShiftTimeSeries(shift, series.datapoints);
    });
  }

applyFrontendFunctions(response: DataQueryResponse, request: DataQueryRequest<any>) {
  for (let i = 0; i < response.data.length; i++) {
    const frame: DataFrame = response.data[i];
    const target = getRequestTarget(request, frame.refId);

    // Apply alias functions
    const aliasFunctions = bindFunctionDefs(target.functions, 'Alias');
    utils.sequence(aliasFunctions)(frame);
  }
  return response;
}

/**
@@ -384,44 +442,38 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
  return this.zabbix.getItemsByIDs(itemids)
    .then(items => {
      return this.queryNumericDataForItems(items, target, timeRange, useTrends, options);
    })
    .then(result => {
      return result.map(s => responseHandler.seriesToDataFrame(s, target));
    });
}

/**
 * Query target data for IT Services
 */
queryITServiceData(target, timeRange, options) {
async queryITServiceData(target, timeRange, request) {
  // Don't show undefined and hidden targets
  if (target.hide || (!target.itservice && !target.itServiceFilter) || !target.slaProperty) {
    return [];
  }

  let itServiceFilter;
  options.isOldVersion = target.itservice && !target.itServiceFilter;
  request.isOldVersion = target.itservice && !target.itServiceFilter;

  if (options.isOldVersion) {
  if (request.isOldVersion) {
    // Backward compatibility
    itServiceFilter = '/.*/';
  } else {
    itServiceFilter = this.replaceTemplateVars(target.itServiceFilter, options.scopedVars);
    itServiceFilter = this.replaceTemplateVars(target.itServiceFilter, request.scopedVars);
  }

  options.slaInterval = target.slaInterval;
  request.slaInterval = target.slaInterval;

  return this.zabbix.getITServices(itServiceFilter)
    .then(itservices => {
      if (options.isOldVersion) {
        itservices = _.filter(itservices, {'serviceid': target.itservice?.serviceid});
      }
      return this.zabbix.getSLA(itservices, timeRange, target, options);})
    .then(itservicesdp => this.applyDataProcessingFunctions(itservicesdp, target))
    .then(result => {
      const dataFrames = result.map(s => responseHandler.seriesToDataFrame(s, target));
      return dataFrames;
    });
  let itservices = await this.zabbix.getITServices(itServiceFilter);
  if (request.isOldVersion) {
    itservices = _.filter(itservices, { 'serviceid': target.itservice?.serviceid });
  }
  const itservicesdp = await this.zabbix.getSLA(itservices, timeRange, target, request);
  const backendRequest = responseHandler.itServiceResponseToTimeSeries(itservicesdp, target.slaInterval);
  const processedResponse = await this.invokeDataProcessingQuery(backendRequest, target, {});
  return this.handleBackendPostProcessingResponse(processedResponse, request, target);
}

queryTriggersData(target, timeRange) {
@@ -500,7 +552,7 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
}

if (target.options?.acknowledged === 0 || target.options?.acknowledged === 1) {
  problemsOptions.acknowledged = target.options?.acknowledged ? true : false;
  problemsOptions.acknowledged = !!target.options?.acknowledged;
}

if (target.options?.minSeverity) {
@@ -594,8 +646,9 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
 * Find metrics from templated request.
 *
 * @param {string} query Query from Templating
 * @param options
 * @return {string} Metric name - group, host, app or item or list
 * of metrics in "{metric1,metcic2,...,metricN}" format.
 * of metrics in "{metric1, metric2,..., metricN}" format.
 */
metricFindQuery(query, options) {
  let resultPromise;
@@ -738,6 +791,10 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
  target.textFilter = this.replaceTemplateVars(target.textFilter, options.scopedVars);
}

if (target.itemids) {
  target.itemids = this.templateSrv.replace(target.itemids, options.scopedVars, zabbixItemIdsTemplateFormat);
}

_.forEach(target.functions, func => {
  func.params = _.map(func.params, param => {
    if (typeof param === 'number') {
@@ -759,6 +816,20 @@ export class ZabbixDatasource extends DataSourceApi<ZabbixMetricsQuery, ZabbixDS
  );
  return useTrends;
}

isBackendTarget = (target: any): boolean => {
  if (this.enableDirectDBConnection) {
    return false;
  }

  return target.queryType === c.MODE_METRICS ||
    target.queryType === c.MODE_ITEMID;
};

isDBConnectionTarget = (target: any): boolean => {
  return this.enableDirectDBConnection &&
    (target.queryType === c.MODE_METRICS || target.queryType === c.MODE_ITEMID);
};
}
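Together these predicates split targets three ways: the direct DB connection path when it is enabled, the new backend processing path for metrics and item ID queries otherwise, and the legacy frontend path for everything else. A hedged dispatch sketch (the string labels are placeholders, not values from this commit):

```typescript
function routeTarget(ds: any, target: any): 'direct-db' | 'backend' | 'frontend' {
  if (ds.isDBConnectionTarget(target)) {
    return 'direct-db'; // InfluxDB/SQL connector path
  }
  if (ds.isBackendTarget(target)) {
    return 'backend'; // the Go plugin performs the data processing
  }
  return 'frontend'; // triggers, problems, text and other modes
}
```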

function bindFunctionDefs(functionDefs, category) {
@@ -784,18 +855,6 @@ function getConsolidateBy(target) {
  return consolidateBy;
}

function downsampleSeries(timeseries_data, options) {
  const defaultAgg = dataProcessor.aggregationFunctions['avg'];
  const consolidateByFunc = dataProcessor.aggregationFunctions[options.consolidateBy] || defaultAgg;
  return _.map(timeseries_data, timeseries => {
    if (timeseries.datapoints.length > options.maxDataPoints) {
      timeseries.datapoints = dataProcessor
        .groupBy(options.interval, consolidateByFunc, timeseries.datapoints);
    }
    return timeseries;
  });
}

function formatMetric(metricObj) {
  return {
    text: metricObj.name,
@@ -845,8 +904,20 @@ function replaceTemplateVars(templateSrv, target, scopedVars) {
  return replacedTarget;
}

function filterEnabledTargets(targets) {
  return _.filter(targets, target => {
    return !(target.hide || !target.group || !target.host || !target.item);
export function base64StringToArrowTable(text: string) {
  const b64 = atob(text);
  const arr = Uint8Array.from(b64, (c) => {
    return c.charCodeAt(0);
  });
  return arr;
}
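Despite its name, `base64StringToArrowTable()` stops at the base64-to-bytes step and returns a plain `Uint8Array`; decoding the bytes into an Arrow table is left to the caller. A hedged usage sketch (it assumes the backend sends an Arrow IPC payload and that an Arrow reader is available; `encodedPayload` is a hypothetical input variable):

```typescript
const bytes = base64StringToArrowTable(encodedPayload);
// e.g. const table = Table.from(bytes); // if the apache-arrow package is in use
```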

function getRequestTarget(request: DataQueryRequest<any>, refId: string): any {
  for (let i = 0; i < request.targets.length; i++) {
    const target = request.targets[i];
    if (target.refId === refId) {
      return target;
    }
  }
  return null;
}

@@ -30,7 +30,7 @@ addFuncDef({
  category: 'Transform',
  params: [
    { name: 'interval', type: 'string'},
    { name: 'function', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median'] }
    { name: 'function', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] }
  ],
  defaultParams: ['1m', 'avg'],
});
@@ -124,6 +124,16 @@ addFuncDef({

// Aggregate

addFuncDef({
  name: 'aggregateBy',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' },
    { name: 'function', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] }
  ],
  defaultParams: ['1m', 'avg'],
});

addFuncDef({
  name: 'sumSeries',
  category: 'Aggregate',
@@ -131,24 +141,6 @@ addFuncDef({
  defaultParams: [],
});

addFuncDef({
  name: 'median',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string'}
  ],
  defaultParams: ['1m'],
});

addFuncDef({
  name: 'average',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' }
  ],
  defaultParams: ['1m'],
});

addFuncDef({
  name: 'percentileAgg',
  category: 'Aggregate',
@@ -159,52 +151,6 @@ addFuncDef({
  defaultParams: ['1m', 95],
});

addFuncDef({
  name: 'min',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' }
  ],
  defaultParams: ['1m'],
});

addFuncDef({
  name: 'max',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' }
  ],
  defaultParams: ['1m'],
});

addFuncDef({
  name: 'sum',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' }
  ],
  defaultParams: ['1m'],
});

addFuncDef({
  name: 'count',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' }
  ],
  defaultParams: ['1m'],
});

addFuncDef({
  name: 'aggregateBy',
  category: 'Aggregate',
  params: [
    { name: 'interval', type: 'string' },
    { name: 'function', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median'] }
  ],
  defaultParams: ['1m', 'avg'],
});

// Filter

addFuncDef({
@@ -212,7 +158,7 @@ addFuncDef({
  category: 'Filter',
  params: [
    { name: 'number', type: 'int' },
    { name: 'value', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median'] }
    { name: 'value', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] }
  ],
  defaultParams: [5, 'avg'],
});
@@ -222,7 +168,7 @@ addFuncDef({
  category: 'Filter',
  params: [
    { name: 'number', type: 'int' },
    { name: 'value', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median'] }
    { name: 'value', type: 'string', options: ['avg', 'min', 'max', 'sum', 'count', 'median', 'first', 'last'] }
  ],
  defaultParams: [5, 'avg'],
});

@@ -49,6 +49,11 @@ function migrateQueryType(target) {
    delete target.mode;
  }
}

// queryType is a string in query model
if (typeof target.queryType === 'number') {
  target.queryType = (target.queryType as number)?.toString();
}
}

function migrateSLA(target) {

@@ -4,7 +4,7 @@ import * as c from './constants';
import * as utils from './utils';
import * as metricFunctions from './metricFunctions';
import * as migrations from './migrations';
import { ShowProblemTypes, ZabbixMetricsQuery } from './types';
import { ShowProblemTypes } from './types';
import { CURRENT_SCHEMA_VERSION } from '../panel-triggers/migrations';
import { getTemplateSrv, TemplateSrv } from '@grafana/runtime';

@@ -22,9 +22,9 @@ function getTargetDefaults() {
  'minSeverity': 3,
  'acknowledged': 2
},
trigger: {filter: ""},
tags: {filter: ""},
proxy: {filter: ""},
trigger: { filter: "" },
tags: { filter: "" },
proxy: { filter: "" },
options: {
  showDisabledItems: false,
  skipEmptyValues: false,
@@ -82,7 +82,7 @@ export class ZabbixQueryController extends QueryCtrl {
zabbix: any;
replaceTemplateVars: any;
templateSrv: TemplateSrv;
editorModes: Array<{ value: string; text: string; queryType: number; }>;
editorModes: Array<{ value: string; text: string; queryType: string; }>;
slaPropertyList: Array<{ name: string; property: string; }>;
slaIntervals: Array<{ text: string; value: string; }>;
ackFilters: Array<{ text: string; value: number; }>;
@@ -115,12 +115,12 @@ export class ZabbixQueryController extends QueryCtrl {
this.templateSrv = getTemplateSrv();

this.editorModes = [
  {value: 'num', text: 'Metrics', queryType: c.MODE_METRICS},
  {value: 'text', text: 'Text', queryType: c.MODE_TEXT},
  {value: 'itservice', text: 'IT Services', queryType: c.MODE_ITSERVICE},
  {value: 'itemid', text: 'Item ID', queryType: c.MODE_ITEMID},
  {value: 'triggers', text: 'Triggers', queryType: c.MODE_TRIGGERS},
  {value: 'problems', text: 'Problems', queryType: c.MODE_PROBLEMS},
  { value: 'num', text: 'Metrics', queryType: c.MODE_METRICS },
  { value: 'text', text: 'Text', queryType: c.MODE_TEXT },
  { value: 'itservice', text: 'IT Services', queryType: c.MODE_ITSERVICE },
  { value: 'itemid', text: 'Item ID', queryType: c.MODE_ITEMID },
  { value: 'triggers', text: 'Triggers', queryType: c.MODE_TRIGGERS },
  { value: 'problems', text: 'Problems', queryType: c.MODE_PROBLEMS },
];

this.$scope.editorMode = {
@@ -133,11 +133,11 @@ export class ZabbixQueryController extends QueryCtrl {
};

this.slaPropertyList = [
  {name: "Status", property: "status"},
  {name: "SLA", property: "sla"},
  {name: "OK time", property: "okTime"},
  {name: "Problem time", property: "problemTime"},
  {name: "Down time", property: "downtimeTime"}
  { name: "Status", property: "status" },
  { name: "SLA", property: "sla" },
  { name: "OK time", property: "okTime" },
  { name: "Problem time", property: "problemTime" },
  { name: "Down time", property: "downtimeTime" }
];

this.slaIntervals = [
@@ -151,9 +151,9 @@ export class ZabbixQueryController extends QueryCtrl {
];

this.ackFilters = [
  {text: 'all triggers', value: 2},
  {text: 'unacknowledged', value: 0},
  {text: 'acknowledged', value: 1},
  { text: 'all triggers', value: 2 },
  { text: 'unacknowledged', value: 0 },
  { text: 'acknowledged', value: 1 },
];

this.problemAckFilters = [
@@ -165,12 +165,12 @@ export class ZabbixQueryController extends QueryCtrl {
this.sortByFields = [
  { text: 'Default', value: 'default' },
  { text: 'Last change', value: 'lastchange' },
  { text: 'Severity', value: 'severity' },
  { text: 'Severity', value: 'severity' },
];

this.showEventsFields = [
  { text: 'All', value: [0,1] },
  { text: 'OK', value: [0] },
  { text: 'All', value: [0, 1] },
  { text: 'OK', value: [0] },
  { text: 'Problems', value: 1 }
];

@@ -201,11 +201,12 @@ export class ZabbixQueryController extends QueryCtrl {
  this.onTargetBlur();
});

this.init = function() {
this.init = () => {
  let target = this.target;

  // Migrate old targets
  target = migrations.migrate(target);
  this.refresh();

  const scopeDefaults = {
    metric: {},
@@ -217,6 +218,7 @@ export class ZabbixQueryController extends QueryCtrl {
  // Load default values
  const targetDefaults = getTargetDefaults();
  _.defaultsDeep(target, targetDefaults);
  this.initDefaultQueryMode(target);

  if (this.panel.type === c.ZABBIX_PROBLEMS_PANEL_ID) {
    target.queryType = c.MODE_PROBLEMS;
@@ -237,9 +239,9 @@ export class ZabbixQueryController extends QueryCtrl {
}

if (target.queryType === c.MODE_METRICS ||
    target.queryType === c.MODE_TEXT ||
    target.queryType === c.MODE_TRIGGERS ||
    target.queryType === c.MODE_PROBLEMS) {
  target.queryType === c.MODE_TEXT ||
  target.queryType === c.MODE_TRIGGERS ||
  target.queryType === c.MODE_PROBLEMS) {
  this.initFilters();
} else if (target.queryType === c.MODE_ITSERVICE) {
  this.suggestITServices();
@@ -256,7 +258,7 @@ export class ZabbixQueryController extends QueryCtrl {
}

initFilters() {
  const mode = _.find(this.editorModes, {'queryType': this.target.queryType});
  const mode = _.find(this.editorModes, { 'queryType': this.target.queryType });
  const itemtype = mode ? mode.value : null;
  const promises = [
    this.suggestGroups(),
@@ -275,6 +277,17 @@ export class ZabbixQueryController extends QueryCtrl {
  return Promise.all(promises);
}

initDefaultQueryMode(target) {
  if (!(target.queryType === c.MODE_METRICS ||
    target.queryType === c.MODE_TEXT ||
    target.queryType === c.MODE_ITSERVICE ||
    target.queryType === c.MODE_ITEMID ||
    target.queryType === c.MODE_TRIGGERS ||
    target.queryType === c.MODE_PROBLEMS)) {
    target.queryType = c.MODE_METRICS;
  }
}

// Get list of metric names for bs-typeahead directive
getMetricNames(metricList, addAllValue) {
  const metrics = _.uniq(_.map(this.metric[metricList], 'name'));
@@ -416,7 +429,7 @@ export class ZabbixQueryController extends QueryCtrl {
  this.moveAliasFuncLast();

  if (newFunc.params.length && newFunc.added ||
      newFunc.def.params.length === 0) {
    newFunc.def.params.length === 0) {
    this.targetChanged();
  }
}

@@ -2,7 +2,19 @@ import _ from 'lodash';
import TableModel from 'grafana/app/core/table_model';
import * as c from './constants';
import * as utils from './utils';
import { ArrayVector, DataFrame, Field, FieldType, MutableDataFrame, MutableField, TIME_SERIES_TIME_FIELD_NAME, TIME_SERIES_VALUE_FIELD_NAME } from '@grafana/data';
import {
  ArrayVector,
  DataFrame,
  dataFrameFromJSON,
  DataFrameJSON,
  Field,
  FieldType,
  getTimeField,
  MutableDataFrame,
  MutableField,
  TIME_SERIES_TIME_FIELD_NAME,
  TIME_SERIES_VALUE_FIELD_NAME,
} from '@grafana/data';
import { ZabbixMetricsQuery } from './types';

/**
@@ -25,12 +37,12 @@ function convertHistory(history, items, addHostName, convertPointCallback) {
 * ]
 */

// Group history by itemid
// Group history by itemid
const grouped_history = _.groupBy(history, 'itemid');
const hosts = _.uniqBy(_.flatten(_.map(items, 'hosts')), 'hostid'); //uniqBy is needed to deduplicate

return _.map(grouped_history, (hist, itemid) => {
  const item = _.find(items, {'itemid': itemid}) as any;
  const item = _.find(items, { 'itemid': itemid }) as any;
  let alias = item.name;

  // Add scopedVars for using in alias functions
@@ -42,7 +54,7 @@ function convertHistory(history, items, addHostName, convertPointCallback) {
};

if (_.keys(hosts).length > 0) {
  const host = _.find(hosts, {'hostid': item.hostid});
  const host = _.find(hosts, { 'hostid': item.hostid });
  scopedVars['__zbx_host'] = { value: host.host };
  scopedVars['__zbx_host_name'] = { value: host.name };

@@ -128,7 +140,7 @@ export function seriesToDataFrame(timeseries, target: ZabbixMetricsQuery, valueM
  }
}

const fields: Field[] = [ timeFiled, valueFiled ];
const fields: Field[] = [timeFiled, valueFiled];

const frame: DataFrame = {
  name: seriesName,
@@ -141,6 +153,94 @@ export function seriesToDataFrame(timeseries, target: ZabbixMetricsQuery, valueM
  return mutableFrame;
}

// Converts DataResponse to the format which backend works with (for data processing)
export function dataResponseToTimeSeries(response: DataFrameJSON[], items) {
  const series = [];
  if (response.length === 0) {
    return [];
  }

  for (const frameJSON of response) {
    const frame = dataFrameFromJSON(frameJSON);
    const { timeField, timeIndex } = getTimeField(frame);
    for (let i = 0; i < frame.fields.length; i++) {
      const field = frame.fields[i];
      if (i === timeIndex || !field.values || !field.values.length) {
        continue;
      }

      const s = [];
      for (let j = 0; j < field.values.length; j++) {
        const v = field.values.get(j);
        if (v !== null) {
          s.push({ time: timeField.values.get(j) / 1000, value: v });
        }
      }

      const itemid = field.name;
      const item = _.find(items, { 'itemid': itemid });

      // Convert interval to nanoseconds in order to unmarshall it on the backend to time.Duration
      let interval = utils.parseItemInterval(item.delay) * 1000000;
      if (interval === 0) {
        interval = null;
      }

      const timeSeriesData = {
        ts: s,
        meta: {
          name: item.name,
          item,
          interval,
        }
      };

      series.push(timeSeriesData);
    }
  }

  return series;
}
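A sketch of the structure this function hands to the backend, with illustrative values (time in seconds, interval in nanoseconds or null, per the conversions above):

```typescript
// [{
//   ts: [{ time: 1500000000, value: 10 }, { time: 1500000060, value: 12 }],
//   meta: {
//     name: 'System information',
//     item: { itemid: '10100', delay: '1m' /* ... */ },
//     interval: 60000000000, // 1m in nanoseconds, for Go's time.Duration
//   },
// }]
```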

export function itServiceResponseToTimeSeries(response: any, interval) {
  const series = [];
  if (response.length === 0) {
    return [];
  }

  for (const s of response) {
    const ts = [];

    if (!s.datapoints) {
      continue;
    }

    const dp = s.datapoints;
    for (let i = 0; i < dp.length; i++) {
      ts.push({ time: dp[i][1] / 1000, value: dp[i][0] });
    }

    // Convert interval to nanoseconds in order to unmarshall it on the backend to time.Duration
    let intervalNS = utils.parseItemInterval(interval) * 1000000;
    if (intervalNS === 0) {
      intervalNS = null;
    }

    const timeSeriesData = {
      ts: ts,
      meta: {
        name: s.target,
        interval: null,
        item: {},
      }
    };

    series.push(timeSeriesData);
  }

  return series;
}
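Grafana-style datapoints are `[value, epochMs]` tuples, so the loop above swaps the order and scales milliseconds to seconds:

```typescript
// [[99.9, 1500000000000]] -> [{ time: 1500000000, value: 99.9 }]
```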

export function isConvertibleToWide(data: DataFrame[]): boolean {
  if (!data || data.length < 2) {
    return false;
@@ -192,7 +292,7 @@ export function alignFrames(data: MutableDataFrame[]): MutableDataFrame[] {
  const missingTimestamps = [];
  const missingValues = [];
  const frameInterval: number = timeField.config.custom?.itemInterval;
  for (let j = minTimestamp; j < firstTs; j+=frameInterval) {
  for (let j = minTimestamp; j < firstTs; j += frameInterval) {
    missingTimestamps.push(j);
    missingValues.push(null);
  }
@@ -213,7 +313,7 @@ export function convertToWide(data: MutableDataFrame[]): DataFrame[] {
    return [];
  }

  const fields: MutableField[] = [ timeField ];
  const fields: MutableField[] = [timeField];

  for (let i = 0; i < data.length; i++) {
    const valueField = data[i].fields.find(f => f.name === TIME_SERIES_VALUE_FIELD_NAME);
@@ -263,10 +363,10 @@ function handleText(history, items, target, addHostName = true) {

function handleHistoryAsTable(history, items, target) {
  const table: any = new TableModel();
  table.addColumn({text: 'Host'});
  table.addColumn({text: 'Item'});
  table.addColumn({text: 'Key'});
  table.addColumn({text: 'Last value'});
  table.addColumn({ text: 'Host' });
  table.addColumn({ text: 'Item' });
  table.addColumn({ text: 'Key' });
  table.addColumn({ text: 'Last value' });

  const grouped_history = _.groupBy(history, 'itemid');
  _.each(items, (item) => {
@@ -365,9 +465,9 @@ function handleTriggersResponse(triggers, groups, timeRange) {
  const stats = getTriggerStats(triggers);
  const groupNames = _.map(groups, 'name');
  const table: any = new TableModel();
  table.addColumn({text: 'Host group'});
  table.addColumn({ text: 'Host group' });
  _.each(_.orderBy(c.TRIGGER_SEVERITY, ['val'], ['desc']), (severity) => {
    table.addColumn({text: severity.text});
    table.addColumn({ text: severity.text });
  });
  _.each(stats, (severity_stats, group) => {
    if (_.includes(groupNames, group)) {
@@ -385,7 +485,7 @@ function getTriggerStats(triggers) {
  // let severity = _.map(c.TRIGGER_SEVERITY, 'text');
  const stats = {};
  _.each(groups, (group) => {
    stats[group] = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}; // severity:count
    stats[group] = { 0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0 }; // severity:count
  });
  _.each(triggers, (trigger) => {
    _.each(trigger.groups, (group) => {
@@ -441,6 +541,8 @@ export default {
  handleTriggersResponse,
  sortTimeseries,
  seriesToDataFrame,
  dataResponseToTimeSeries,
  itServiceResponseToTimeSeries,
  isConvertibleToWide,
  convertToWide,
  alignFrames,

@@ -1,51 +0,0 @@
import _ from 'lodash';
import dataProcessor from '../dataProcessor';

describe('dataProcessor', () => {
  let ctx = {};

  beforeEach(() => {
    ctx.datapoints = [
      [[10, 1500000000000], [2, 1500000001000], [7, 1500000002000], [1, 1500000003000]],
      [[9, 1500000000000], [3, 1500000001000], [4, 1500000002000], [8, 1500000003000]],
    ];
  });

  describe('When apply groupBy() functions', () => {
    it('should return series average', () => {
      let aggregateBy = dataProcessor.metricFunctions['groupBy'];
      const avg2s = _.map(ctx.datapoints, (dp) => aggregateBy('2s', 'avg', dp));
      expect(avg2s).toEqual([
        [[6, 1500000000000], [4, 1500000002000]],
        [[6, 1500000000000], [6, 1500000002000]],
      ]);

      const avg10s = _.map(ctx.datapoints, (dp) => aggregateBy('10s', 'avg', dp));
      expect(avg10s).toEqual([
        [[5, 1500000000000]],
        [[6, 1500000000000]],
      ]);

      // not aligned
      const dp = [[10, 1500000001000], [2, 1500000002000], [7, 1500000003000], [1, 1500000004000]];
      expect(aggregateBy('2s', 'avg', dp)).toEqual([
        [10, 1500000000000], [4.5, 1500000002000], [1, 1500000004000]
      ]);
    });
  });

  describe('When apply aggregateBy() functions', () => {
    it('should return series average', () => {
      let aggregateBy = dataProcessor.metricFunctions['aggregateBy'];
      const avg1s = aggregateBy('1s', 'avg', ctx.datapoints);
      expect(avg1s).toEqual([
        [9.5, 1500000000000], [2.5, 1500000001000], [5.5, 1500000002000], [4.5, 1500000003000]
      ]);

      const avg10s = aggregateBy('10s', 'avg', ctx.datapoints);
      expect(avg10s).toEqual([
        [5.5, 1500000000000]
      ]);
    });
  });
});
@@ -1,15 +1,17 @@
import _ from 'lodash';
import mocks from '../../test-setup/mocks';
import { ZabbixDatasource } from "../datasource";
import { zabbixTemplateFormat } from "../datasource";
import { ZabbixDatasource, zabbixTemplateFormat } from "../datasource";
import { dateMath } from '@grafana/data';

jest.mock('@grafana/runtime', () => ({
  getBackendSrv: () => ({
    datasourceRequest: jest.fn().mockResolvedValue({data: {result: ''}}),
    datasourceRequest: jest.fn().mockResolvedValue({ data: { result: '' } }),
    fetch: () => ({
      toPromise: () => jest.fn().mockResolvedValue({ data: { result: '' } })
    }),
  }),
  loadPluginCss: () => {},
}), {virtual: true});
  loadPluginCss: () => {
  },
}), { virtual: true });

describe('ZabbixDatasource', () => {
  let ctx = {};
@@ -27,24 +29,13 @@ describe('ZabbixDatasource', () => {
    }
  };

  ctx.templateSrv = mocks.templateSrvMock;
  ctx.datasourceSrv = mocks.datasourceSrvMock;

  ctx.ds = new ZabbixDatasource(ctx.instanceSettings, ctx.templateSrv);
});

describe('When querying data', () => {
  beforeEach(() => {
    ctx.ds.replaceTemplateVars = (str) => str;
  });

  ctx.options = {
    targets: [
      {
        group: {filter: ""},
        host: {filter: ""},
        application: {filter: ""},
        item: {filter: ""}
        group: { filter: "" },
        host: { filter: "" },
        application: { filter: "" },
        item: { filter: "" }
      }
    ],
    range: {
@@ -53,64 +44,24 @@ describe('ZabbixDatasource', () => {
    }
  };

  it('should return an empty array when no targets are set', (done) => {
    let options = {
      targets: [],
      range: {from: 'now-6h', to: 'now'}
    };
    ctx.ds.query(options).then(result => {
      expect(result.data.length).toBe(0);
      done();
    });
  });

  it('should use trends if it enabled and time more than trendsFrom', (done) => {
    let ranges = ['now-8d', 'now-169h', 'now-1M', 'now-1y'];

    _.forEach(ranges, range => {
      ctx.options.range.from = dateMath.parse(range);
      ctx.ds.queryNumericData = jest.fn();
      ctx.ds.query(ctx.options);

      // Check that useTrends options is true
      let callArgs = ctx.ds.queryNumericData.mock.calls[0];
      expect(callArgs[2]).toBe(true);
      ctx.ds.queryNumericData.mockClear();
    });

    done();
  });

  it('shouldnt use trends if it enabled and time less than trendsFrom', (done) => {
    let ranges = ['now-7d', 'now-168h', 'now-1h', 'now-30m', 'now-30s'];

    _.forEach(ranges, range => {
      ctx.options.range.from = dateMath.parse(range);
      ctx.ds.queryNumericData = jest.fn();
      ctx.ds.query(ctx.options);

      // Check that useTrends options is false
      let callArgs = ctx.ds.queryNumericData.mock.calls[0];
      expect(callArgs[2]).toBe(false);
      ctx.ds.queryNumericData.mockClear();
    });
    done();
  });
  ctx.templateSrv = mocks.templateSrvMock;
  ctx.datasourceSrv = mocks.datasourceSrvMock;

  ctx.ds = new ZabbixDatasource(ctx.instanceSettings, ctx.templateSrv);
});

describe('When querying text data', () => {
  beforeEach(() => {
    ctx.ds.replaceTemplateVars = (str) => str;
    ctx.ds.zabbix.zabbixAPI.getHistory = jest.fn().mockReturnValue(Promise.resolve([
      {clock: "1500010200", itemid:"10100", ns:"900111000", value:"Linux first"},
      {clock: "1500010300", itemid:"10100", ns:"900111000", value:"Linux 2nd"},
      {clock: "1500010400", itemid:"10100", ns:"900111000", value:"Linux last"}
      { clock: "1500010200", itemid: "10100", ns: "900111000", value: "Linux first" },
      { clock: "1500010300", itemid: "10100", ns: "900111000", value: "Linux 2nd" },
      { clock: "1500010400", itemid: "10100", ns: "900111000", value: "Linux last" }
    ]));

    ctx.ds.zabbix.getItemsFromTarget = jest.fn().mockReturnValue(Promise.resolve([
      {
        hosts: [{hostid: "10001", name: "Zabbix server"}],
        hosts: [{ hostid: "10001", name: "Zabbix server" }],
        itemid: "10100",
        name: "System information",
        key_: "system.uname",
@@ -118,10 +69,10 @@ describe('ZabbixDatasource', () => {
    ]));

    ctx.options.targets = [{
      group: {filter: ""},
      host: {filter: "Zabbix server"},
      application: {filter: ""},
      item: {filter: "System information"},
      group: { filter: "" },
      host: { filter: "Zabbix server" },
      application: { filter: "" },
      item: { filter: "System information" },
      textFilter: "",
      useCaptureGroups: true,
      queryType: 2,
@@ -138,7 +89,7 @@ describe('ZabbixDatasource', () => {

    let tableData = result.data[0];
    expect(tableData.columns).toEqual([
      {text: 'Host'}, {text: 'Item'}, {text: 'Key'}, {text: 'Last value'}
      { text: 'Host' }, { text: 'Item' }, { text: 'Key' }, { text: 'Last value' }
    ]);
    expect(tableData.rows).toEqual([
      ['Zabbix server', 'System information', 'system.uname', 'Linux last']
@@ -159,22 +110,22 @@ describe('ZabbixDatasource', () => {
  it('should skip item when last value is empty', () => {
    ctx.ds.zabbix.getItemsFromTarget = jest.fn().mockReturnValue(Promise.resolve([
      {
        hosts: [{hostid: "10001", name: "Zabbix server"}],
        hosts: [{ hostid: "10001", name: "Zabbix server" }],
        itemid: "10100", name: "System information", key_: "system.uname"
      },
      {
        hosts: [{hostid: "10002", name: "Server02"}],
        hosts: [{ hostid: "10002", name: "Server02" }],
        itemid: "90109", name: "System information", key_: "system.uname"
      }
    ]));

    ctx.options.targets[0].options.skipEmptyValues = true;
    ctx.ds.zabbix.getHistory = jest.fn().mockReturnValue(Promise.resolve([
      {clock: "1500010200", itemid:"10100", ns:"900111000", value:"Linux first"},
      {clock: "1500010300", itemid:"10100", ns:"900111000", value:"Linux 2nd"},
      {clock: "1500010400", itemid:"10100", ns:"900111000", value:"Linux last"},
      {clock: "1500010200", itemid:"90109", ns:"900111000", value:"Non empty value"},
      {clock: "1500010500", itemid:"90109", ns:"900111000", value:""}
      { clock: "1500010200", itemid: "10100", ns: "900111000", value: "Linux first" },
      { clock: "1500010300", itemid: "10100", ns: "900111000", value: "Linux 2nd" },
      { clock: "1500010400", itemid: "10100", ns: "900111000", value: "Linux last" },
      { clock: "1500010200", itemid: "90109", ns: "900111000", value: "Non empty value" },
      { clock: "1500010500", itemid: "90109", ns: "900111000", value: "" }
    ]));
    return ctx.ds.query(ctx.options).then(result => {
      let tableData = result.data[0];
@@ -249,9 +200,9 @@ describe('ZabbixDatasource', () => {

  it('should return groups', (done) => {
    const tests = [
      {query: '*', expect: '/.*/'},
      {query: 'Backend', expect: 'Backend'},
      {query: 'Back*', expect: 'Back*'},
      { query: '*', expect: '/.*/' },
      { query: 'Backend', expect: 'Backend' },
      { query: 'Back*', expect: 'Back*' },
    ];

    for (const test of tests) {
@@ -274,10 +225,10 @@ describe('ZabbixDatasource', () => {

  it('should return hosts', (done) => {
    const tests = [
      {query: '*.*', expect: ['/.*/', '/.*/']},
      {query: '.', expect: ['', '']},
      {query: 'Backend.*', expect: ['Backend', '/.*/']},
      {query: 'Back*.', expect: ['Back*', '']},
      { query: '*.*', expect: ['/.*/', '/.*/'] },
      { query: '.', expect: ['', ''] },
      { query: 'Backend.*', expect: ['Backend', '/.*/'] },
      { query: 'Back*.', expect: ['Back*', ''] },
    ];

    for (const test of tests) {
@@ -290,10 +241,10 @@ describe('ZabbixDatasource', () => {

  it('should return applications', (done) => {
    const tests = [
      {query: '*.*.*', expect: ['/.*/', '/.*/', '/.*/']},
      {query: '.*.', expect: ['', '/.*/', '']},
      {query: 'Backend.backend01.*', expect: ['Backend', 'backend01', '/.*/']},
      {query: 'Back*.*.', expect: ['Back*', '/.*/', '']}
      { query: '*.*.*', expect: ['/.*/', '/.*/', '/.*/'] },
      { query: '.*.', expect: ['', '/.*/', ''] },
      { query: 'Backend.backend01.*', expect: ['Backend', 'backend01', '/.*/'] },
      { query: 'Back*.*.', expect: ['Back*', '/.*/', ''] }
    ];

    for (const test of tests) {
@@ -306,16 +257,16 @@ describe('ZabbixDatasource', () => {

  it('should return items', (done) => {
    const tests = [
      {query: '*.*.*.*', expect: ['/.*/', '/.*/', '', '/.*/']},
      {query: '.*.*.*', expect: ['', '/.*/', '', '/.*/']},
      {query: 'Backend.backend01.*.*', expect: ['Backend', 'backend01', '', '/.*/']},
      {query: 'Back*.*.cpu.*', expect: ['Back*', '/.*/', 'cpu', '/.*/']}
      { query: '*.*.*.*', expect: ['/.*/', '/.*/', '', '/.*/'] },
      { query: '.*.*.*', expect: ['', '/.*/', '', '/.*/'] },
      { query: 'Backend.backend01.*.*', expect: ['Backend', 'backend01', '', '/.*/'] },
      { query: 'Back*.*.cpu.*', expect: ['Back*', '/.*/', 'cpu', '/.*/'] }
    ];

    for (const test of tests) {
      ctx.ds.metricFindQuery(test.query);
      expect(ctx.ds.zabbix.getItems)
        .toBeCalledWith(test.expect[0], test.expect[1], test.expect[2], test.expect[3]);
        .toBeCalledWith(test.expect[0], test.expect[1], test.expect[2], test.expect[3]);
      ctx.ds.zabbix.getItems.mockClear();
    }
    done();

@@ -5,8 +5,8 @@ const getAllMock = jest.fn().mockReturnValue([{ id: 42, name: 'foo', meta: {} }]

jest.mock('@grafana/runtime', () => ({
  getDataSourceSrv: () => ({
    loadDatasource: loadDatasourceMock,
    getAll: getAllMock
    get: loadDatasourceMock,
    getList: getAllMock
  }),
}));

@@ -3,7 +3,7 @@ import { compactQuery } from '../utils';

jest.mock('@grafana/runtime', () => ({
  getDataSourceSrv: jest.fn(() => ({
    loadDatasource: jest.fn().mockResolvedValue(
    get: jest.fn().mockResolvedValue(
      { id: 42, name: 'InfluxDB DS', meta: {} }
    ),
  })),
@@ -29,8 +29,11 @@ describe('InfluxDBConnector', () => {
  const { itemids, range, intervalSec, table, aggFunction } = ctx.defaultQueryParams;
  const query = ctx.influxDBConnector.buildHistoryQuery(itemids, table, range, intervalSec, aggFunction);
  const expected = compactQuery(`SELECT MAX("value")
    FROM "history" WHERE ("itemid" = '123' OR "itemid" = '234') AND "time" >= 15000s AND "time" <= 15100s
    GROUP BY time(5s), "itemid" fill(none)
    FROM "history"
    WHERE ("itemid" = '123' OR "itemid" = '234')
    AND "time" >= 15000s
    AND "time" <= 15100s
    GROUP BY time(5s), "itemid" fill(none)
  `);
  expect(query).toBe(expected);
});
@@ -40,8 +43,11 @@ describe('InfluxDBConnector', () => {
  const aggFunction = 'avg';
  const query = ctx.influxDBConnector.buildHistoryQuery(itemids, table, range, intervalSec, aggFunction);
  const expected = compactQuery(`SELECT MEAN("value")
    FROM "history" WHERE ("itemid" = '123' OR "itemid" = '234') AND "time" >= 15000s AND "time" <= 15100s
    GROUP BY time(5s), "itemid" fill(none)
    FROM "history"
    WHERE ("itemid" = '123' OR "itemid" = '234')
    AND "time" >= 15000s
    AND "time" <= 15100s
    GROUP BY time(5s), "itemid" fill(none)
  `);
  expect(query).toBe(expected);
});
@@ -55,8 +61,11 @@ describe('InfluxDBConnector', () => {
  { itemid: '123', value_type: 3 }
];
const expectedQuery = compactQuery(`SELECT MEAN("value")
  FROM "history_uint" WHERE ("itemid" = '123') AND "time" >= 15000s AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
  FROM "history_uint"
  WHERE ("itemid" = '123')
  AND "time" >= 15000s
  AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
`);
ctx.influxDBConnector.getHistory(items, timeFrom, timeTill, options);
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
@@ -71,10 +80,12 @@ describe('InfluxDBConnector', () => {
];
const sharedQueryPart = `AND "time" >= 15000s AND "time" <= 15100s GROUP BY time(5s), "itemid" fill(none)`;
const expectedQueryFirst = compactQuery(`SELECT MEAN("value")
  FROM "history" WHERE ("itemid" = '123') ${sharedQueryPart}
  FROM "history"
  WHERE ("itemid" = '123') ${sharedQueryPart}
`);
const expectedQuerySecond = compactQuery(`SELECT MEAN("value")
  FROM "history_uint" WHERE ("itemid" = '234') ${sharedQueryPart}
  FROM "history_uint"
  WHERE ("itemid" = '234') ${sharedQueryPart}
`);
ctx.influxDBConnector.getHistory(items, timeFrom, timeTill, options);
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledTimes(2);
@@ -90,8 +101,11 @@ describe('InfluxDBConnector', () => {
  { itemid: '123', value_type: 3 }
];
const expectedQuery = compactQuery(`SELECT MEAN("value")
  FROM "history_uint" WHERE ("itemid" = '123') AND "time" >= 15000s AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
  FROM "history_uint"
  WHERE ("itemid" = '123')
  AND "time" >= 15000s
  AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
`);
ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
@@ -104,8 +118,11 @@ describe('InfluxDBConnector', () => {
  { itemid: '123', value_type: 3 }
];
const expectedQuery = compactQuery(`SELECT MEAN("value_avg")
  FROM "longterm"."history_uint" WHERE ("itemid" = '123') AND "time" >= 15000s AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
  FROM "longterm"."history_uint"
  WHERE ("itemid" = '123')
  AND "time" >= 15000s
  AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
`);
ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);
@@ -118,8 +135,11 @@ describe('InfluxDBConnector', () => {
  { itemid: '123', value_type: 3 }
];
const expectedQuery = compactQuery(`SELECT MAX("value_max")
  FROM "longterm"."history_uint" WHERE ("itemid" = '123') AND "time" >= 15000s AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
  FROM "longterm"."history_uint"
  WHERE ("itemid" = '123')
  AND "time" >= 15000s
  AND "time" <= 15100s
  GROUP BY time(5s), "itemid" fill(none)
`);
ctx.influxDBConnector.getTrends(items, timeFrom, timeTill, options);
expect(ctx.influxDBConnector.invokeInfluxDBQuery).toHaveBeenCalledWith(expectedQuery);

@@ -2,7 +2,7 @@ import _ from 'lodash';
import moment from 'moment';
import * as c from './constants';
import { VariableQuery, VariableQueryTypes } from './types';
import { MappingType, ValueMapping, getValueFormats, DataFrame, FieldType, rangeUtil } from '@grafana/data';
import { DataFrame, FieldType, getValueFormats, MappingType, rangeUtil, ValueMapping } from '@grafana/data';

/*
 * This regex matches 3 types of variable reference with an optional format specifier
@@ -57,7 +57,7 @@ function splitKeyParams(paramStr) {
} else if (symbol === '"' && !quoted) {
  quoted = true;
} else if (symbol === '[' && !quoted) {
  in_array = true;
  in_array = true;
} else if (symbol === ']' && !quoted) {
  in_array = false;
} else if (symbol === split_symbol && !quoted && !in_array) {
@@ -218,7 +218,7 @@ export function getRangeScopedVars(range) {
  __range_ms: { text: msRange, value: msRange },
  __range_s: { text: sRange, value: sRange },
  __range: { text: regularRange, value: regularRange },
  __range_series: {text: c.RANGE_VARIABLE_VALUE, value: c.RANGE_VARIABLE_VALUE},
  __range_series: { text: c.RANGE_VARIABLE_VALUE, value: c.RANGE_VARIABLE_VALUE },
};
}

@@ -236,7 +236,7 @@ export function escapeRegex(value) {
}

/**
 * Parses Zabbix item update interval. Returns 0 in case of custom intervals.
 * Parses Zabbix item update interval (returns milliseconds). Returns 0 in case of custom intervals.
 */
export function parseItemInterval(interval: string): number {
  const normalizedInterval = normalizeZabbixInterval(interval);
@@ -255,6 +255,7 @@ export function normalizeZabbixInterval(interval: string): string {
  return parsedInterval[1] + (parsedInterval.length > 2 ? parsedInterval[2] : 's');
}
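With the clarified contract, a couple of expected values (assuming `parseInterval()` below returns milliseconds, as its comment states; the flexible-interval string is illustrative):

```typescript
// parseItemInterval('30s') === 30000
// parseItemInterval('1m')  === 60000
// parseItemInterval('50s/1-7,00:00-24:00') === 0 // custom (flexible) intervals fall back to 0
```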

// Returns interval in milliseconds
export function parseInterval(interval: string): number {
  const intervalPattern = /(^[\d]+)(y|M|w|d|h|m|s)/g;
  const momentInterval: any[] = intervalPattern.exec(interval);
@@ -315,7 +316,7 @@ export function convertToZabbixAPIUrl(url) {
 * when waiting for result.
 */
export function callOnce(func, promiseKeeper) {
  return function() {
  return function () {
    if (!promiseKeeper) {
      promiseKeeper = Promise.resolve(
        func.apply(this, arguments)
@@ -337,7 +338,7 @@ export function callOnce(func, promiseKeeper) {
 * @param {*} funcsArray functions to apply
 */
export function sequence(funcsArray) {
  return function(result) {
  return function (result) {
    for (let i = 0; i < funcsArray.length; i++) {
      result = funcsArray[i].call(this, result);
    }
@@ -399,7 +400,7 @@ export function parseTags(tagStr: string): any[] {
  let tags: any[] = _.map(tagStr.split(','), (tag) => tag.trim());
  tags = _.map(tags, (tag) => {
    const tagParts = tag.split(':');
    return {tag: tagParts[0].trim(), value: tagParts[1].trim()};
    return { tag: tagParts[0].trim(), value: tagParts[1].trim() };
  });
  return tags;
}
@@ -457,10 +458,12 @@ export function getValueMapping(item, valueMappings: any[]): ValueMapping[] | nu

  return (mapping.mappings as any[]).map((m, i) => {
    const valueMapping: ValueMapping = {
      id: i,
      // id: i,
      type: MappingType.ValueToText,
      value: m.value,
      text: m.newvalue,
      options: {
        value: m.value,
        text: m.newvalue,
      }
    };
    return valueMapping;
  });

@@ -1,173 +0,0 @@
import _ from 'lodash';
import { getDataSourceSrv } from '@grafana/runtime';

export const DEFAULT_QUERY_LIMIT = 10000;
export const HISTORY_TO_TABLE_MAP = {
  '0': 'history',
  '1': 'history_str',
  '2': 'history_log',
  '3': 'history_uint',
  '4': 'history_text'
};

export const TREND_TO_TABLE_MAP = {
  '0': 'trends',
  '3': 'trends_uint'
};

export const consolidateByFunc = {
  'avg': 'AVG',
  'min': 'MIN',
  'max': 'MAX',
  'sum': 'SUM',
  'count': 'COUNT'
};

export const consolidateByTrendColumns = {
  'avg': 'value_avg',
  'min': 'value_min',
  'max': 'value_max',
  'sum': 'num*value_avg' // sum of sums inside the one-hour trend period
};

/**
 * Base class for external history database connectors. Subclasses should implement `getHistory()`, `getTrends()` and
 * `testDataSource()` methods, which describe how to fetch data from source other than Zabbix API.
 */
export class DBConnector {
  constructor(options) {
    this.datasourceId = options.datasourceId;
    this.datasourceName = options.datasourceName;
    this.datasourceTypeId = null;
    this.datasourceTypeName = null;
  }

  static loadDatasource(dsId, dsName) {
    if (!dsName && dsId !== undefined) {
      let ds = _.find(getDataSourceSrv().getAll(), {'id': dsId});
      if (!ds) {
        return Promise.reject(`Data Source with ID ${dsId} not found`);
      }
      dsName = ds.name;
    }
    if (dsName) {
      return getDataSourceSrv().loadDatasource(dsName);
    } else {
      return Promise.reject(`Data Source name should be specified`);
    }
  }

  loadDBDataSource() {
    return DBConnector.loadDatasource(this.datasourceId, this.datasourceName)
      .then(ds => {
        this.datasourceTypeId = ds.meta.id;
        this.datasourceTypeName = ds.meta.name;
        if (!this.datasourceName) {
          this.datasourceName = ds.name;
        }
        if (!this.datasourceId) {
          this.datasourceId = ds.id;
        }
        return ds;
      });
  }

  /**
   * Send test request to datasource in order to ensure it's working.
   */
  testDataSource() {
    throw new ZabbixNotImplemented('testDataSource()');
  }

  /**
   * Get history data from external sources.
   */
  getHistory() {
    throw new ZabbixNotImplemented('getHistory()');
  }

  /**
   * Get trends data from external sources.
   */
  getTrends() {
    throw new ZabbixNotImplemented('getTrends()');
  }

  handleGrafanaTSResponse(history, items, addHostName = true) {
    return convertGrafanaTSResponse(history, items, addHostName);
  }
}

// Define Zabbix DB Connector exception type for non-implemented methods
export class ZabbixNotImplemented {
  constructor(methodName) {
    this.code = null;
    this.name = 'ZabbixNotImplemented';
    this.message = `Zabbix DB Connector Error: method ${methodName || ''} should be implemented in subclass of DBConnector`;
  }

  toString() {
    return this.message;
  }
}

/**
 * Converts time series returned by the data source into format that Grafana expects
 * time_series is Array of series:
 * ```
 * [{
 *   name: string,
 *   points: Array<[value: number, timestamp: number]>
 * }]
 * ```
 */
function convertGrafanaTSResponse(time_series, items, addHostName) {
  //uniqBy is needed to deduplicate
  const hosts = _.uniqBy(_.flatten(_.map(items, 'hosts')), 'hostid');
  let grafanaSeries = _.map(_.compact(time_series), series => {
    const itemid = series.name;
    const item = _.find(items, {'itemid': itemid});
    let alias = item.name;

    // Add scopedVars for using in alias functions
    const scopedVars = {
      '__zbx_item': { value: item.name },
      '__zbx_item_name': { value: item.name },
      '__zbx_item_key': { value: item.key_ },
      '__zbx_item_interval': { value: item.delay },
    };

    if (_.keys(hosts).length > 0) {
      const host = _.find(hosts, {'hostid': item.hostid});
      scopedVars['__zbx_host'] = { value: host.host };
      scopedVars['__zbx_host_name'] = { value: host.name };

      // Only add host when multiple hosts selected
      if (_.keys(hosts).length > 1 && addHostName) {
        alias = host.name + ": " + alias;
      }
    }
    // CachingProxy deduplicates requests and returns one time series for equal queries.
    // Clone is needed to prevent changing of series object shared between all targets.
    const datapoints = _.cloneDeep(series.points);
    return {
      target: alias,
      datapoints,
      scopedVars,
      item
    };
  });

  return _.sortBy(grafanaSeries, 'target');
}

const defaults = {
  DBConnector,
  DEFAULT_QUERY_LIMIT,
  HISTORY_TO_TABLE_MAP,
  TREND_TO_TABLE_MAP,
  consolidateByFunc,
  consolidateByTrendColumns
};

export default defaults;
97
src/datasource-zabbix/zabbix/connectors/dbConnector.ts
Normal file
@@ -0,0 +1,97 @@
import _ from 'lodash';
import { getDataSourceSrv } from '@grafana/runtime';

export const DEFAULT_QUERY_LIMIT = 10000;

export const HISTORY_TO_TABLE_MAP = {
  '0': 'history',
  '1': 'history_str',
  '2': 'history_log',
  '3': 'history_uint',
  '4': 'history_text'
};

export const TREND_TO_TABLE_MAP = {
  '0': 'trends',
  '3': 'trends_uint'
};

export const consolidateByFunc = {
  'avg': 'AVG',
  'min': 'MIN',
  'max': 'MAX',
  'sum': 'SUM',
  'count': 'COUNT'
};

export const consolidateByTrendColumns = {
  'avg': 'value_avg',
  'min': 'value_min',
  'max': 'value_max',
  'sum': 'num*value_avg' // sum of sums inside the one-hour trend period
};

export interface IDBConnector {
  getHistory(): any;

  getTrends(): any;

  testDataSource(): any;
}

/**
 * Base class for external history database connectors. Subclasses should implement `getHistory()`, `getTrends()` and
 * `testDataSource()` methods, which describe how to fetch data from source other than Zabbix API.
 */
export class DBConnector {
  protected datasourceId: any;
  private datasourceName: any;
  protected datasourceTypeId: any;
  private datasourceTypeName: any;

  constructor(options) {
    this.datasourceId = options.datasourceId;
    this.datasourceName = options.datasourceName;
    this.datasourceTypeId = null;
    this.datasourceTypeName = null;
  }

  static loadDatasource(dsId, dsName) {
    if (!dsName && dsId !== undefined) {
      const ds = _.find(getDataSourceSrv().getList(), { 'id': dsId });
      if (!ds) {
        return Promise.reject(`Data Source with ID ${dsId} not found`);
      }
      dsName = ds.name;
    }
    if (dsName) {
      return getDataSourceSrv().get(dsName);
    } else {
      return Promise.reject(`Data Source name should be specified`);
    }
  }

  loadDBDataSource() {
    return DBConnector.loadDatasource(this.datasourceId, this.datasourceName)
      .then(ds => {
        this.datasourceTypeId = ds.meta.id;
        this.datasourceTypeName = ds.meta.name;
        if (!this.datasourceName) {
          this.datasourceName = ds.name;
        }
        if (!this.datasourceId) {
          this.datasourceId = ds.id;
        }
        return ds;
      });
  }
}
|
||||
|
||||
export default {
|
||||
DBConnector,
|
||||
DEFAULT_QUERY_LIMIT,
|
||||
HISTORY_TO_TABLE_MAP,
|
||||
TREND_TO_TABLE_MAP,
|
||||
consolidateByFunc,
|
||||
consolidateByTrendColumns
|
||||
};
|
||||
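As the class comment above says, concrete connectors override the three fetch methods. A minimal, hypothetical subclass for illustration (the real SQL and InfluxDB connectors below follow the same pattern):

```
import { DBConnector } from './dbConnector';

class NoopConnector extends DBConnector {
  getHistory() {
    return Promise.resolve([]); // fetch raw history from the external store
  }
  getTrends() {
    return Promise.resolve([]); // fetch downsampled trend data
  }
  testDataSource() {
    return Promise.resolve({ status: 'success' });
  }
}
```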
@@ -1,6 +1,6 @@
import _ from 'lodash';
import { compactQuery } from '../../../utils';
import { DBConnector, HISTORY_TO_TABLE_MAP, consolidateByTrendColumns } from '../dbConnector';
import { consolidateByTrendColumns, DBConnector, HISTORY_TO_TABLE_MAP } from '../dbConnector';

const consolidateByFunc = {
  'avg': 'MEAN',
@@ -11,6 +11,9 @@ const consolidateByFunc = {
};

export class InfluxDBConnector extends DBConnector {
  private retentionPolicy: any;
  private influxDS: any;

  constructor(options) {
    super(options);
    this.retentionPolicy = options.retentionPolicy;
@@ -26,16 +29,19 @@ export class InfluxDBConnector extends DBConnector {
  testDataSource() {
    return this.influxDS.testDatasource().then(result => {
      if (result.status && result.status === 'error') {
        return Promise.reject({ data: {
          message: `InfluxDB connection error: ${result.message}`
        }});
        return Promise.reject({
          data: {
            message: `InfluxDB connection error: ${result.message}`
          }
        });
      }
      return result;
    });
  }

  getHistory(items, timeFrom, timeTill, options) {
    let { intervalMs, consolidateBy, retentionPolicy } = options;
    const { intervalMs, retentionPolicy } = options;
    let { consolidateBy } = options;
    const intervalSec = Math.ceil(intervalMs / 1000);

    const range = { timeFrom, timeTill };
@@ -71,9 +77,12 @@ export class InfluxDBConnector extends DBConnector {
    }
    const aggregation = consolidateByFunc[aggFunction] || aggFunction;
    const where_clause = this.buildWhereClause(itemids);
    const query = `SELECT ${aggregation}("${value}") FROM ${measurement}
      WHERE ${where_clause} AND "time" >= ${timeFrom}s AND "time" <= ${timeTill}s
      GROUP BY time(${intervalSec}s), "itemid" fill(none)`;
    const query = `SELECT ${aggregation}("${value}")
      FROM ${measurement}
      WHERE ${where_clause}
        AND "time" >= ${timeFrom}s
        AND "time" <= ${timeTill}s
      GROUP BY time(${intervalSec}s), "itemid" fill(none)`;
    return compactQuery(query);
  }

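Assuming `compactQuery()` from `utils` simply collapses the template into a single line, the reformatted builder yields the same InfluxQL string as before. With made-up literals:

```
const query = compactQuery(`SELECT MEAN("value")
  FROM "history"
  WHERE ("itemid" = '10105')
    AND "time" >= 1510000000s
    AND "time" <= 1510003600s
  GROUP BY time(60s), "itemid" fill(none)`);
// => one line: SELECT MEAN("value") FROM "history" WHERE ... GROUP BY time(60s), "itemid" fill(none)
```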
@@ -1,41 +0,0 @@
/**
 * MySQL queries
 */

function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
  let query = `
    SELECT CAST(itemid AS CHAR) AS metric, MIN(clock) AS time_sec, ${aggFunction}(value) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom} AND clock < ${timeTill}
    GROUP BY (clock-${timeFrom}) DIV ${intervalSec}, metric
    ORDER BY time_sec ASC
  `;
  return query;
}

function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
  let query = `
    SELECT CAST(itemid AS CHAR) AS metric, MIN(clock) AS time_sec, ${aggFunction}(${valueColumn}) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom} AND clock < ${timeTill}
    GROUP BY (clock-${timeFrom}) DIV ${intervalSec}, metric
    ORDER BY time_sec ASC
  `;
  return query;
}

const TEST_QUERY = `SELECT CAST(itemid AS CHAR) AS metric, clock AS time_sec, value_avg AS value FROM trends_uint LIMIT 1`;

function testQuery() {
  return TEST_QUERY;
}

const mysql = {
  historyQuery,
  trendsQuery,
  testQuery
};

export default mysql;
44 src/datasource-zabbix/zabbix/connectors/sql/mysql.ts Normal file
@@ -0,0 +1,44 @@
/**
 * MySQL queries
 */

function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
  return `
    SELECT CAST(itemid AS CHAR) AS metric, MIN(clock) AS time_sec, ${aggFunction}(value) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom}
      AND clock < ${timeTill}
    GROUP BY (clock-${timeFrom}) DIV ${intervalSec}, metric
    ORDER BY time_sec ASC
  `;
}

function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
  return `
    SELECT CAST(itemid AS CHAR) AS metric, MIN(clock) AS time_sec, ${aggFunction}(${valueColumn}) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom}
      AND clock < ${timeTill}
    GROUP BY (clock-${timeFrom}) DIV ${intervalSec}, metric
    ORDER BY time_sec ASC
  `;
}

function testQuery() {
  return `SELECT CAST(itemid AS CHAR) AS metric, clock AS time_sec, value_avg AS value
          FROM trends_uint LIMIT 1`;
}

const mysql = {
  historyQuery,
  trendsQuery,
  testQuery
};

export default mysql;
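For reference, a hypothetical call and the SQL it renders (callers run the result through `compactQuery`, which collapses the whitespace):

```
const sql = mysql.historyQuery('10105, 10106', 'history_uint', 1510000000, 1510003600, 60, 'AVG');
// SELECT CAST(itemid AS CHAR) AS metric, MIN(clock) AS time_sec, AVG(value) AS value
// FROM history_uint
// WHERE itemid IN (10105, 10106) AND clock > 1510000000 AND clock < 1510003600
// GROUP BY (clock-1510000000) DIV 60, metric
// ORDER BY time_sec ASC
```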
@@ -1,48 +0,0 @@
/**
 * Postgres queries
 */

const ITEMID_FORMAT = 'FM99999999999999999999';

function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
  let time_expression = `clock / ${intervalSec} * ${intervalSec}`;
  let query = `
    SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(value) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom} AND clock < ${timeTill}
    GROUP BY 1, 2
    ORDER BY time ASC
  `;
  return query;
}

function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
  let time_expression = `clock / ${intervalSec} * ${intervalSec}`;
  let query = `
    SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(${valueColumn}) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom} AND clock < ${timeTill}
    GROUP BY 1, 2
    ORDER BY time ASC
  `;
  return query;
}

const TEST_QUERY = `
  SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, clock AS time, value_avg AS value
  FROM trends_uint LIMIT 1
`;

function testQuery() {
  return TEST_QUERY;
}

const postgres = {
  historyQuery,
  trendsQuery,
  testQuery
};

export default postgres;
52 src/datasource-zabbix/zabbix/connectors/sql/postgres.ts Normal file
@@ -0,0 +1,52 @@
/**
 * Postgres queries
 */

const ITEMID_FORMAT = 'FM99999999999999999999';

function historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction) {
  const time_expression = `clock / ${intervalSec} * ${intervalSec}`;
  return `
    SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(value) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom}
      AND clock < ${timeTill}
    GROUP BY 1, 2
    ORDER BY time ASC
  `;
}

function trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn) {
  const time_expression = `clock / ${intervalSec} * ${intervalSec}`;
  return `
    SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, ${time_expression} AS time, ${aggFunction}(${valueColumn}) AS value
    FROM ${table}
    WHERE itemid IN (${itemids})
      AND clock > ${timeFrom}
      AND clock < ${timeTill}
    GROUP BY 1, 2
    ORDER BY time ASC
  `;
}

const TEST_QUERY = `
  SELECT to_char(itemid, '${ITEMID_FORMAT}') AS metric, clock AS time, value_avg AS value
  FROM trends_uint LIMIT 1
`;

function testQuery() {
  return TEST_QUERY;
}

const postgres = {
  historyQuery,
  trendsQuery,
  testQuery
};

export default postgres;
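The `clock / ${intervalSec} * ${intervalSec}` expression relies on Postgres integer division truncating, which snaps every sample to the start of its interval. The same arithmetic in TypeScript, with made-up values:

```
const intervalSec = 60;
const clock = 1510000042;
const bucket = Math.floor(clock / intervalSec) * intervalSec; // 1510000020
```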
@@ -11,6 +11,9 @@ const supportedDatabases = {
};

export class SQLConnector extends DBConnector {
  private limit: number;
  private sqlDialect: any;

  constructor(options) {
    super(options);

@@ -35,28 +38,18 @@ export class SQLConnector extends DBConnector {
   * Try to invoke test query for one of Zabbix database tables.
   */
  testDataSource() {
    let testQuery = this.sqlDialect.testQuery();
    const testQuery = this.sqlDialect.testQuery();
    return this.invokeSQLQuery(testQuery);
  }

  getHistory(items, timeFrom, timeTill, options) {
    let {intervalMs, consolidateBy} = options;
    let intervalSec = Math.ceil(intervalMs / 1000);

    // The interval must match the time range exactly n times, otherwise
    // the resulting first and last data points will yield invalid values in the
    // calculated average value in downsampleSeries - when using consolidateBy(avg)
    let numOfIntervals = Math.ceil((timeTill - timeFrom) / intervalSec);
    intervalSec = (timeTill - timeFrom) / numOfIntervals;

    consolidateBy = consolidateBy || 'avg';
    let aggFunction = dbConnector.consolidateByFunc[consolidateBy];
    const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);

    // Group items by value type and perform request for each value type
    let grouped_items = _.groupBy(items, 'value_type');
    let promises = _.map(grouped_items, (items, value_type) => {
      let itemids = _.map(items, 'itemid').join(', ');
      let table = HISTORY_TO_TABLE_MAP[value_type];
    const grouped_items = _.groupBy(items, 'value_type');
    const promises = _.map(grouped_items, (items, value_type) => {
      const itemids = _.map(items, 'itemid').join(', ');
      const table = HISTORY_TO_TABLE_MAP[value_type];
      let query = this.sqlDialect.historyQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction);

      query = compactQuery(query);
@@ -69,23 +62,14 @@ export class SQLConnector extends DBConnector {
  }

  getTrends(items, timeFrom, timeTill, options) {
    let { intervalMs, consolidateBy } = options;
    let intervalSec = Math.ceil(intervalMs / 1000);

    // The interval must match the time range exactly n times, otherwise
    // the resulting first and last data points will yield invalid values in the
    // calculated average value in downsampleSeries - when using consolidateBy(avg)
    let numOfIntervals = Math.ceil((timeTill - timeFrom) / intervalSec);
    intervalSec = (timeTill - timeFrom) / numOfIntervals;

    consolidateBy = consolidateBy || 'avg';
    let aggFunction = dbConnector.consolidateByFunc[consolidateBy];
    const { consolidateBy } = options;
    const { aggFunction, intervalSec } = getAggFunc(timeFrom, timeTill, options);

    // Group items by value type and perform request for each value type
    let grouped_items = _.groupBy(items, 'value_type');
    let promises = _.map(grouped_items, (items, value_type) => {
      let itemids = _.map(items, 'itemid').join(', ');
      let table = TREND_TO_TABLE_MAP[value_type];
    const grouped_items = _.groupBy(items, 'value_type');
    const promises = _.map(grouped_items, (items, value_type) => {
      const itemids = _.map(items, 'itemid').join(', ');
      const table = TREND_TO_TABLE_MAP[value_type];
      let valueColumn = _.includes(['avg', 'min', 'max', 'sum'], consolidateBy) ? consolidateBy : 'avg';
      valueColumn = dbConnector.consolidateByTrendColumns[valueColumn];
      let query = this.sqlDialect.trendsQuery(itemids, table, timeFrom, timeTill, intervalSec, aggFunction, valueColumn);
@@ -100,7 +84,7 @@ export class SQLConnector extends DBConnector {
  }

  invokeSQLQuery(query) {
    let queryDef = {
    const queryDef = {
      refId: 'A',
      format: 'time_series',
      datasourceId: this.datasourceId,
@@ -109,19 +93,35 @@ export class SQLConnector extends DBConnector {
    };

    return getBackendSrv().datasourceRequest({
      url: '/api/tsdb/query',
      url: '/api/ds/query',
      method: 'POST',
      data: {
        queries: [queryDef],
      }
    })
    .then(response => {
      let results = response.data.results;
      const results = response.data.results;
      if (results['A']) {
        return results['A'].series;
        return results['A'].frames;
      } else {
        return null;
      }
    });
  }
}

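The body posted to the unified `/api/ds/query` endpoint therefore looks roughly as follows. The fields elided by the hunk presumably carry the raw SQL and the row limit; `rawSql`, `maxDataPoints`, and the datasource id `42` are assumptions for illustration:

```
const body = {
  queries: [{
    refId: 'A',
    format: 'time_series',
    datasourceId: 42,        // assumption: this.datasourceId
    rawSql: 'SELECT ...',    // assumption: the compacted dialect query
    maxDataPoints: 10000,    // assumption: this.limit
  }],
};
// The response is now read from results['A'].frames: the data-frame format replaces
// the legacy series format used with /api/tsdb/query.
```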
function getAggFunc(timeFrom, timeTill, options) {
  const { intervalMs } = options;
  let { consolidateBy } = options;
  let intervalSec = Math.ceil(intervalMs / 1000);

  // The interval must match the time range exactly n times, otherwise
  // the resulting first and last data points will yield invalid values in the
  // calculated average value in downsampleSeries - when using consolidateBy(avg)
  const numOfIntervals = Math.ceil((timeTill - timeFrom) / intervalSec);
  intervalSec = (timeTill - timeFrom) / numOfIntervals;

  consolidateBy = consolidateBy || 'avg';
  const aggFunction = dbConnector.consolidateByFunc[consolidateBy];
  return { aggFunction, intervalSec };
}
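A quick numeric check of the extracted helper: for a one-hour range and a requested 7 s interval, the interval is stretched so it divides the range a whole number of times (all values made up):

```
const timeFrom = 0, timeTill = 3600;
let intervalSec = Math.ceil(7000 / 1000);                 // 7 s requested
const n = Math.ceil((timeTill - timeFrom) / intervalSec); // 515 intervals
intervalSec = (timeTill - timeFrom) / n;                  // ~6.9903 s, fits exactly 515 times
```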
@@ -76,7 +76,7 @@ export class ZabbixAPIConnector {
      requestOptions.headers.Authorization = this.requestOptions.basicAuth;
    }

    const response = await getBackendSrv().datasourceRequest(requestOptions);
    const response = await getBackendSrv().fetch<any>(requestOptions).toPromise();
    return response?.data?.result;
  }

@@ -4,13 +4,12 @@ import semver from 'semver';
import * as utils from '../utils';
import responseHandler from '../responseHandler';
import { CachingProxy } from './proxy/cachingProxy';
// import { ZabbixNotImplemented } from './connectors/dbConnector';
import { DBConnector } from './connectors/dbConnector';
import { ZabbixAPIConnector } from './connectors/zabbix_api/zabbixAPIConnector';
import { SQLConnector } from './connectors/sql/sqlConnector';
import { InfluxDBConnector } from './connectors/influxdb/influxdbConnector';
import { ZabbixConnector } from './types';
import { joinTriggersWithProblems, joinTriggersWithEvents } from '../problemsHandler';
import { joinTriggersWithEvents, joinTriggersWithProblems } from '../problemsHandler';
import { ProblemDTO } from '../types';

interface AppsResponse extends Array<any> {
@@ -274,7 +273,7 @@ export class Zabbix implements ZabbixConnector {
      })
      .then(items => {
        if (!options.showDisabledItems) {
          items = _.filter(items, {'status': '0'});
          items = _.filter(items, { 'status': '0' });
        }

        return items;
@@ -432,7 +431,7 @@ export class Zabbix implements ZabbixConnector {
    const [timeFrom, timeTo] = timeRange;
    if (this.enableDirectDBConnection) {
      return this.getHistoryDB(items, timeFrom, timeTo, options)
        .then(history => this.dbConnector.handleGrafanaTSResponse(history, items));
        .then(history => responseHandler.dataResponseToTimeSeries(history, items));
    } else {
      return this.zabbixAPI.getHistory(items, timeFrom, timeTo)
        .then(history => responseHandler.handleHistory(history, items));
@@ -443,7 +442,7 @@ export class Zabbix implements ZabbixConnector {
    const [timeFrom, timeTo] = timeRange;
    if (this.enableDirectDBConnection) {
      return this.getTrendsDB(items, timeFrom, timeTo, options)
        .then(history => this.dbConnector.handleGrafanaTSResponse(history, items));
        .then(history => responseHandler.dataResponseToTimeSeries(history, items));
    } else {
      const valueType = options.consolidateBy || options.valueType;
      return this.zabbixAPI.getTrend(items, timeFrom, timeTo)
@@ -473,7 +472,7 @@ export class Zabbix implements ZabbixConnector {
    return this.zabbixAPI.getSLA(itServiceIds, timeRange, options)
      .then(slaResponse => {
        return _.map(itServiceIds, serviceid => {
          const itservice = _.find(itservices, {'serviceid': serviceid});
          const itservice = _.find(itservices, { 'serviceid': serviceid });
          return responseHandler.handleSLAResponse(itservice, target.slaProperty, slaResponse);
        });
      });
@@ -489,7 +488,7 @@ export class Zabbix implements ZabbixConnector {
 * @return array with finded element or empty array
 */
function findByName(list, name) {
  const finded = _.find(list, {'name': name});
  const finded = _.find(list, { 'name': name });
  if (finded) {
    return [finded];
  } else {
@@ -506,7 +505,7 @@ function findByName(list, name) {
 * @return {[type]} array with finded element or empty array
 */
function filterByName(list, name) {
  const finded = _.filter(list, {'name': name});
  const finded = _.filter(list, { 'name': name });
  if (finded) {
    return finded;
  } else {

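Condensed, the new direct-DB history path reads as below (a paraphrase of the hunks above, not code from the commit):

```
const [timeFrom, timeTo] = timeRange;
return this.enableDirectDBConnection
  ? this.getHistoryDB(items, timeFrom, timeTo, options)
      .then(history => responseHandler.dataResponseToTimeSeries(history, items))
  : this.zabbixAPI.getHistory(items, timeFrom, timeTo)
      .then(history => responseHandler.handleHistory(history, items));
```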
@@ -1,5 +1,5 @@
import React, { PureComponent } from 'react';
import { cx, css } from 'emotion';
import { cx, css } from '@emotion/css';
import { ZBX_ACK_ACTION_ADD_MESSAGE, ZBX_ACK_ACTION_ACK, ZBX_ACK_ACTION_CHANGE_SEVERITY, ZBX_ACK_ACTION_CLOSE } from '../../datasource-zabbix/constants';
import { Button, VerticalGroup, Spinner, Modal, Input, Checkbox, RadioButtonGroup, stylesFactory, withTheme, Themeable, TextArea } from '@grafana/ui';
import { FAIcon } from '../../components';
@@ -147,8 +147,9 @@ export class AckModalUnthemed extends PureComponent<Props, State> {
    const { canClose } = this.props;

    const actions = [
      <Checkbox key="ack" label="Acknowledge" value={this.state.acknowledge} onChange={this.onAcknowledgeToggle} />,
      <Checkbox css="" key="ack" label="Acknowledge" value={this.state.acknowledge} onChange={this.onAcknowledgeToggle} />,
      <Checkbox
        css=""
        key="change-severity"
        label="Change severity"
        description=""
@@ -164,7 +165,14 @@
        onChange={this.onChangeSelectedSeverity}
      />,
      canClose &&
      <Checkbox key="close" label="Close problem" disabled={!canClose} value={this.state.closeProblem} onChange={this.onCloseProblemToggle} />,
      <Checkbox
        css=""
        key="close"
        label="Close problem"
        disabled={!canClose}
        value={this.state.closeProblem}
        onChange={this.onCloseProblemToggle}
      />,
    ];

    // <VerticalGroup /> doesn't handle empty elements properly, so don't return it
@@ -197,6 +205,7 @@
      <div className={inputGroupClass}>
        <label className="gf-form-hint">
          <TextArea className={inputClass}
            css=""
            type="text"
            name="message"
            placeholder="Message"

@@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { cx, css } from 'emotion';
import { cx, css } from '@emotion/css';
import { GFHeartIcon } from '../../../components';
import { ProblemDTO } from '../../../datasource-zabbix/types';

@@ -1,5 +1,5 @@
import React, { PureComponent } from 'react';
import { cx, css } from 'emotion';
import { cx, css } from '@emotion/css';
import { GrafanaTheme, SelectableValue } from '@grafana/data';
import { Button, Spinner, Modal, Select, stylesFactory, withTheme, Themeable } from '@grafana/ui';
import { ZBXScript, APIExecuteScriptResponse } from '../../datasource-zabbix/zabbix/connectors/zabbix_api/types';

@@ -1,5 +1,5 @@
import React from 'react';
import { css } from 'emotion';
import { css } from '@emotion/css';
import { RTCell } from '../../types';
import { ProblemDTO } from '../../../datasource-zabbix/types';
import { FAIcon } from '../../../components';

@@ -102,106 +102,110 @@ export class ProblemDetails extends PureComponent<ProblemDetailsProps, ProblemDe

    return (
      <div className={`problem-details-container ${displayClass}`}>
        <div className="problem-details">
          <div className="problem-details-row">
            <div className="problem-value-container">
              <div className="problem-age">
                <FAIcon icon="clock-o" />
                <span>{age}</span>
        <div className="problem-details-head">
          <div className="problem-actions-left">
            <ExploreButton problem={problem} panelId={this.props.panelId} />
          </div>
          {problem.showAckButton &&
            <div className="problem-actions">
              <ModalController>
                {({ showModal, hideModal }) => (
                  <ExecScriptButton
                    className="problem-action-button"
                    onClick={() => {
                      showModal(ExecScriptModal, {
                        getScripts: this.getScripts,
                        onSubmit: this.onExecuteScript,
                        onDismiss: hideModal,
                      });
                    }}
                  />
                )}
              </ModalController>
              <ModalController>
                {({ showModal, hideModal }) => (
                  <AckButton
                    className="problem-action-button"
                    onClick={() => {
                      showModal(AckModal, {
                        canClose: problem.manual_close === '1',
                        severity: problemSeverity,
                        onSubmit: this.ackProblem,
                        onDismiss: hideModal,
                      });
                    }}
                  />
                )}
              </ModalController>
            </div>
          }
          <ProblemStatusBar problem={problem} alerts={alerts} className={compactStatusBar && 'compact'} />
        </div>
        <div className="problem-details-body">
          <div className="problem-details">
            <div className="problem-details-row">
              <div className="problem-value-container">
                <div className="problem-age">
                  <FAIcon icon="clock-o" />
                  <span>{age}</span>
                </div>
                {problem.items && <ProblemItems items={problem.items} />}
              </div>
              {problem.items && <ProblemItems items={problem.items} />}
            </div>
            <div className="problem-actions-left">
              <ExploreButton problem={problem} panelId={this.props.panelId} />
            </div>
            <ProblemStatusBar problem={problem} alerts={alerts} className={compactStatusBar && 'compact'} />
            {problem.showAckButton &&
              <div className="problem-actions">
                <ModalController>
                  {({ showModal, hideModal }) => (
                    <ExecScriptButton
                      className="navbar-button navbar-button--settings"
                      onClick={() => {
                        showModal(ExecScriptModal, {
                          getScripts: this.getScripts,
                          onSubmit: this.onExecuteScript,
                          onDismiss: hideModal,
                        });
                      }}
                    />
                  )}
                </ModalController>
                <ModalController>
                  {({ showModal, hideModal }) => (
                    <AckButton
                      className="navbar-button navbar-button--settings"
                      onClick={() => {
                        showModal(AckModal, {
                          canClose: problem.manual_close === '1',
                          severity: problemSeverity,
                          onSubmit: this.ackProblem,
                          onDismiss: hideModal,
                        });
                      }}
                    />
                  )}
                </ModalController>
                {problem.comments &&
                  <div className="problem-description-row">
                    <div className="problem-description">
                      <Tooltip placement="right" content={problem.comments}>
                        <span className="description-label">Description: </span>
                      </Tooltip>
                      <span>{problem.comments}</span>
                    </div>
                  </div>
                }
                {problem.tags && problem.tags.length > 0 &&
                  <div className="problem-tags">
                    {problem.tags && problem.tags.map(tag =>
                      <EventTag
                        key={tag.tag + tag.value}
                        tag={tag}
                        highlight={tag.tag === problem.correlation_tag}
                        onClick={this.handleTagClick}
                      />)
                    }
                  </div>
                }
                {this.props.showTimeline && this.state.events.length > 0 &&
                  <ProblemTimeline events={this.state.events} timeRange={this.props.timeRange} />
                }
                {showAcknowledges && !wideLayout &&
                  <div className="problem-ack-container">
                    <h6><FAIcon icon="reply-all" /> Acknowledges</h6>
                    <AcknowledgesList acknowledges={problem.acknowledges} />
                  </div>
                }
              </div>
            {problem.comments &&
              <div className="problem-description-row">
                <div className="problem-description">
                  <Tooltip placement="right" content={problem.comments}>
                    <span className="description-label">Description: </span>
                  </Tooltip>
                  <span>{problem.comments}</span>
            {showAcknowledges && wideLayout &&
              <div className="problem-details-middle">
                <div className="problem-ack-container">
                  <h6><FAIcon icon="reply-all" /> Acknowledges</h6>
                  <AcknowledgesList acknowledges={problem.acknowledges} />
                </div>
              </div>
            }
            {problem.tags && problem.tags.length > 0 &&
              <div className="problem-tags">
                {problem.tags && problem.tags.map(tag =>
                  <EventTag
                    key={tag.tag + tag.value}
                    tag={tag}
                    highlight={tag.tag === problem.correlation_tag}
                    onClick={this.handleTagClick}
                  />)
                }
              </div>
            }
            {this.props.showTimeline && this.state.events.length > 0 &&
              <ProblemTimeline events={this.state.events} timeRange={this.props.timeRange} />
            }
            {showAcknowledges && !wideLayout &&
              <div className="problem-ack-container">
                <h6><FAIcon icon="reply-all" /> Acknowledges</h6>
                <AcknowledgesList acknowledges={problem.acknowledges} />
              </div>
            }
          </div>
          {showAcknowledges && wideLayout &&
            <div className="problem-details-middle">
              <div className="problem-ack-container">
                <h6><FAIcon icon="reply-all" /> Acknowledges</h6>
                <AcknowledgesList acknowledges={problem.acknowledges} />
              </div>
            </div>
          }
          <div className="problem-details-right">
            <div className="problem-details-right-item">
              <FAIcon icon="database" />
              <span>{problem.datasource}</span>
            </div>
            {problem.proxy &&
              <div className="problem-details-right">
                <div className="problem-details-right-item">
                  <FAIcon icon="cloud" />
                  <span>{problem.proxy}</span>
                  <FAIcon icon="database" />
                  <span>{problem.datasource}</span>
                </div>
              }
              {problem.groups && <ProblemGroups groups={problem.groups} className="problem-details-right-item" />}
              {problem.hosts && <ProblemHosts hosts={problem.hosts} className="problem-details-right-item" />}
            {problem.proxy &&
              <div className="problem-details-right-item">
                <FAIcon icon="cloud" />
                <span>{problem.proxy}</span>
              </div>
            }
            {problem.groups && <ProblemGroups groups={problem.groups} className="problem-details-right-item" />}
            {problem.hosts && <ProblemHosts hosts={problem.hosts} className="problem-details-right-item" />}
          </div>
        </div>
      </div>
    );

13 src/panel-triggers/specs/matchMedia.mock Normal file
@@ -0,0 +1,13 @@
Object.defineProperty(window, 'matchMedia', {
  writable: true,
  value: jest.fn().mockImplementation(query => ({
    matches: false,
    media: query,
    onchange: null,
    addListener: jest.fn(),    // deprecated
    removeListener: jest.fn(), // deprecated
    addEventListener: jest.fn(),
    removeEventListener: jest.fn(),
    dispatchEvent: jest.fn(),
  })),
});
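jsdom, the DOM implementation Jest tests run against, does not provide `window.matchMedia`, so components that query it would throw during tests; the spec files below therefore import this mock first. Usage:

```
import './matchMedia.mock'; // must run before code that calls window.matchMedia

window.matchMedia('(min-width: 768px)').matches; // false: every query resolves to a non-matching stub
```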
@@ -1,25 +1,25 @@
import _ from 'lodash';
import mocks from '../../test-setup/mocks';
import {TriggerPanelCtrl} from '../triggers_panel_ctrl';
import { DEFAULT_TARGET, DEFAULT_SEVERITY, PANEL_DEFAULTS } from '../triggers_panel_ctrl';
import './matchMedia.mock';
import { DEFAULT_SEVERITY, DEFAULT_TARGET, PANEL_DEFAULTS, TriggerPanelCtrl } from '../triggers_panel_ctrl';
import { CURRENT_SCHEMA_VERSION } from '../migrations';

jest.mock('@grafana/runtime', () => {
  return {
    getDataSourceSrv: () => ({
      getMetricSources: () => {
        return [{ meta: {id: 'alexanderzobnin-zabbix-datasource'}, value: {}, name: 'zabbix_default' }];
        return [{ meta: { id: 'alexanderzobnin-zabbix-datasource' }, value: {}, name: 'zabbix_default' }];
      },
      get: () => Promise.resolve({})
    }),
  };
}, {virtual: true});
}, { virtual: true });

describe('Triggers Panel schema migration', () => {
  let ctx: any = {};
  let updatePanelCtrl;

  const timeoutMock = () => {};
  const timeoutMock = () => {
  };

  beforeEach(() => {
    ctx = {

@@ -1,6 +1,6 @@
import _ from 'lodash';
import { TriggerPanelCtrl } from '../triggers_panel_ctrl';
import { PANEL_DEFAULTS, DEFAULT_TARGET } from '../triggers_panel_ctrl';
import './matchMedia.mock';
import { DEFAULT_TARGET, PANEL_DEFAULTS, TriggerPanelCtrl } from '../triggers_panel_ctrl';

let datasourceSrvMock, zabbixDSMock;

@@ -8,19 +8,21 @@ jest.mock('@grafana/runtime', () => {
  return {
    getDataSourceSrv: () => datasourceSrvMock,
  };
}, {virtual: true});
}, { virtual: true });

describe('TriggerPanelCtrl', () => {
  let ctx: any = {};
  let createPanelCtrl: () => any;

  beforeEach(() => {
    ctx = { scope: {
      panel: {
        ...PANEL_DEFAULTS,
        sortProblems: 'lastchange',
    ctx = {
      scope: {
        panel: {
          ...PANEL_DEFAULTS,
          sortProblems: 'lastchange',
        }
      }
    }};
    };
    ctx.scope.panel.targets = [{
      ...DEFAULT_TARGET,
      datasource: 'zabbix_default',
@@ -43,10 +45,10 @@ describe('TriggerPanelCtrl', () => {
    ctx.panelCtrl = createPanelCtrl();

    ctx.dataFramesReceived = generateDataFramesResponse([
      {id: "1", timestamp: "1510000010", severity: 5},
      {id: "2", timestamp: "1510000040", severity: 3},
      {id: "3", timestamp: "1510000020", severity: 4},
      {id: "4", timestamp: "1510000030", severity: 2},
      { id: "1", timestamp: "1510000010", severity: 5 },
      { id: "2", timestamp: "1510000040", severity: 3 },
      { id: "3", timestamp: "1510000020", severity: 4 },
      { id: "4", timestamp: "1510000030", severity: 2 },
    ]);
  });

@@ -68,7 +70,7 @@ describe('TriggerPanelCtrl', () => {

  it('should format triggers', (done) => {
    ctx.panelCtrl.onDataFramesReceived(ctx.dataFramesReceived).then(() => {
      const formattedTrigger: any = _.find(ctx.panelCtrl.renderData, {triggerid: "1"});
      const formattedTrigger: any = _.find(ctx.panelCtrl.renderData, { triggerid: "1" });
      expect(formattedTrigger.host).toBe('backend01');
      expect(formattedTrigger.hostTechName).toBe('backend01_tech');
      expect(formattedTrigger.datasource).toBe('zabbix_default');
@@ -167,7 +169,7 @@ const defaultProblem: any = {
  "value": "1"
};

function generateDataFramesResponse(problemDescs: any[] = [{id: 1}]): any {
function generateDataFramesResponse(problemDescs: any[] = [{ id: 1 }]): any {
  const problems = problemDescs.map(problem => generateProblem(problem.id, problem.timestamp, problem.severity));

  return [
@@ -205,5 +207,5 @@ function generateProblem(id, timestamp?, severity?): any {
}

function getProblemById(id, ctx): any {
  return _.find(ctx.panelCtrl.renderData, {triggerid: id.toString()});
  return _.find(ctx.panelCtrl.renderData, { triggerid: id.toString() });
}

@@ -26,7 +26,7 @@
    {"name": "Metric Editor", "path": "img/screenshot-metric_editor.png"},
    {"name": "Triggers", "path": "img/screenshot-triggers.png"}
  ],
  "version": "4.1.4",
  "version": "4.2-pre",
  "updated": "2021-03-09"
},

@@ -215,6 +215,7 @@

.problem-details-container {
  display: flex;
  flex-direction: column;
  padding-top: 4px;
  background-color: $problem-details-background;
  border-bottom-width: 0px;
@@ -227,7 +228,7 @@
  transition-property: opacity, max-height;

  &.show {
    max-height: 20rem;
    max-height: 32rem;
    opacity: 1;
    box-shadow: inset -3px 3px 10px $problem-container-shadow;
  }
@@ -244,6 +245,15 @@
  }
}

.problem-details-head {
  display: flex;
  padding: 0.5rem 1rem 0.5rem 1.2rem;
}

.problem-details-body {
  display: flex;
}

.problem-details {
  position: relative;
  flex: 10 1 auto;
@@ -256,7 +266,8 @@

.problem-description {
  position: relative;
  height: 4.5rem;
  max-height: 4.5rem;
  min-height: 3rem;
  overflow: hidden;

  &:after {
@@ -358,7 +369,11 @@

.problem-actions {
  display: flex;
  margin-left: 1.6rem;
  margin-right: 1.6rem;
}

.problem-action-button {
  margin-right: 0.2rem;
}

.problem-actions-left {
@@ -376,6 +391,7 @@
}

.problem-ack-container {
  max-height: 8rem;
  margin-top: 0.6rem;
  margin-left: -0.6rem;
  padding: 1.2rem 0.6rem;

@@ -19,6 +19,7 @@
    "noImplicitAny": false,
    "noUnusedLocals": false,
    "baseUrl": "./src",
    "strictFunctionTypes": false
    "strictFunctionTypes": false,
    "skipLibCheck": true
  }
}

@@ -1,7 +1,7 @@
const path = require('path');
const webpack = require('webpack');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const CleanWebpackPlugin = require('clean-webpack-plugin');
// const CleanWebpackPlugin = require('clean-webpack-plugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');

const ExtractTextPluginLight = new ExtractTextPlugin('./css/grafana-zabbix.light.css');
@@ -27,8 +29,29 @@ module.exports = {
  },
  externals: [
    // remove the line below if you don't want to use builtin versions
    'jquery', 'lodash', 'moment', 'angular', 'emotion',
    'react', 'react-dom', '@grafana/ui', '@grafana/data', '@grafana/runtime',
    'lodash',
    'jquery',
    'moment',
    'slate',
    'emotion',
    '@emotion/react',
    '@emotion/css',
    'prismjs',
    'slate-plain-serializer',
    '@grafana/slate-react',
    'react',
    'react-dom',
    'react-redux',
    'redux',
    'rxjs',
    'react-router-dom',
    'd3',
    'angular',
    '@grafana/ui',
    '@grafana/runtime',
    '@grafana/data',
    'monaco-editor',
    'react-monaco-editor',
    function (context, request, callback) {
      var prefix = 'grafana/';
      if (request.indexOf(prefix) === 0) {
@@ -47,9 +68,6 @@ module.exports = {
      { from: '../README.md' },
      { from: '**/img/*' },
    ]),
    new CleanWebpackPlugin(['dist'], {
      root: resolve('.')
    }),
    ExtractTextPluginLight,
    ExtractTextPluginDark,
  ],
@@ -59,21 +77,39 @@ module.exports = {
  module: {
    rules: [
      {
        test: /\.js$/,
        exclude: /(external)/,
        use: {
          loader: 'babel-loader',
          query: {
            presets: ['@babel/preset-env']
          }
        }
        test: /\.tsx?$/,
        loaders: [
          {
            loader: 'babel-loader',
            options: {
              presets: [['@babel/preset-env', { modules: false }]],
              plugins: ['angularjs-annotate'],
              sourceMaps: true,
            },
          },
          {
            loader: 'ts-loader',
            options: {
              onlyCompileBundledFiles: true,
              transpileOnly: true,
            },
          },
        ],
        exclude: /(node_modules)/,
      },
      {
        test: /\.tsx?$/,
        exclude: /node_modules|external/,
        test: /\.jsx?$/,
        loaders: [
          "ts-loader"
          {
            loader: 'babel-loader',
            options: {
              presets: [['@babel/preset-env', { modules: false }]],
              plugins: ['angularjs-annotate'],
              sourceMaps: true,
            },
          },
        ],
        exclude: /(node_modules)/,
      },
      {
        test: /\.html$/,
