diff --git a/.circleci/config.yml b/.circleci/config.yml index e631e0a8d33..a4bb2d67855 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -83,13 +83,14 @@ jobs: - checkout - run: 'go get -u github.com/alecthomas/gometalinter' - run: 'go get -u github.com/tsenart/deadcode' + - run: 'go get -u github.com/jgautheron/goconst/cmd/goconst' - run: 'go get -u github.com/gordonklaus/ineffassign' - run: 'go get -u github.com/opennota/check/cmd/structcheck' - run: 'go get -u github.com/mdempsky/unconvert' - run: 'go get -u github.com/opennota/check/cmd/varcheck' - run: name: run linters - command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' + command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=goconst --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' - run: name: run go vet command: 'go vet ./pkg/...' @@ -157,14 +158,18 @@ jobs: name: sha-sum packages command: 'go run build.go sha-dist' - run: - name: Build Grafana.com publisher + name: Build Grafana.com master publisher command: 'go build -o scripts/publish scripts/build/publish.go' + - run: + name: Build Grafana.com release publisher + command: 'cd scripts/build/release_publisher && go build -o release_publisher .' - persist_to_workspace: root: . 
paths: - dist/grafana* - scripts/*.sh - scripts/publish + - scripts/build/release_publisher/release_publisher build: docker: @@ -298,8 +303,8 @@ jobs: name: deploy to s3 command: 'aws s3 sync ./dist s3://$BUCKET_NAME/release' - run: - name: Trigger Windows build - command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release' + name: Deploy to Grafana.com + command: './scripts/build/publish.sh' workflows: version: 2 diff --git a/.gitignore b/.gitignore index bf97948d178..20e8fffb3b1 100644 --- a/.gitignore +++ b/.gitignore @@ -40,8 +40,8 @@ public/css/*.min.css conf/custom.ini fig.yml -docker-compose.yml -docker-compose.yaml +devenv/docker-compose.yml +devenv/docker-compose.yaml /conf/provisioning/**/custom.yaml /conf/provisioning/**/dev.yaml /conf/ldap_dev.toml @@ -72,3 +72,6 @@ debug.test *.orig /devenv/bulk-dashboards/*.json +/devenv/bulk_alerting_dashboards/*.json + +/scripts/build/release_publisher/release_publisher diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d32615ce97..95862a0727a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,23 +1,32 @@ -# 5.4.0 (unreleased) +# 5.3.0 (unreleased) + +# 5.3.0-beta3 (2018-10-03) + +* **Stackdriver**: Fix for missing ngInject [#13511](https://github.com/grafana/grafana/pull/13511) +* **Permissions**: Fix for broken permissions selector [#13507](https://github.com/grafana/grafana/issues/13507) +* **Alerting**: Alert reminders deduping not working as expected when running multiple Grafana instances [#13492](https://github.com/grafana/grafana/issues/13492) + +# 5.3.0-beta2 (2018-10-01) ### New Features * **Annotations**: Enable template variables in tagged annotations queries [#9735](https://github.com/grafana/grafana/issues/9735) +* **Stackdriver**: Support for Google Stackdriver Datasource [#13289](https://github.com/grafana/grafana/pull/13289) ### Minor +* **Provisioning**: Dashboard Provisioning now support symlinks that changes target [#12534](https://github.com/grafana/grafana/issues/12534), 
thx [@auhlig](https://github.com/auhlig) * **OAuth**: Allow oauth email attribute name to be configurable [#12986](https://github.com/grafana/grafana/issues/12986), thx [@bobmshannon](https://github.com/bobmshannon) * **Tags**: Default sort order for GetDashboardTags [#11681](https://github.com/grafana/grafana/pull/11681), thx [@Jonnymcc](https://github.com/Jonnymcc) * **Prometheus**: Label completion queries respect dashboard time range [#12251](https://github.com/grafana/grafana/pull/12251), thx [@mtanda](https://github.com/mtanda) * **Prometheus**: Allow to display annotations based on Prometheus series value [#10159](https://github.com/grafana/grafana/issues/10159), thx [@mtanda](https://github.com/mtanda) * **Prometheus**: Adhoc-filtering for Prometheus dashboards [#13212](https://github.com/grafana/grafana/issues/13212) * **Singlestat**: Fix gauge display accuracy for percents [#13270](https://github.com/grafana/grafana/issues/13270), thx [@tianon](https://github.com/tianon) - -# 5.3.0 (unreleased) - -### Minor - +* **Dashboard**: Prevent auto refresh from starting when loading dashboard with absolute time range [#12030](https://github.com/grafana/grafana/issues/12030) +* **Templating**: New templating variable type `Text box` that allows free text input [#3173](https://github.com/grafana/grafana/issues/3173) * **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira) +* **Alerting**: Fixes a bug where all alerts would send reminders after upgrade & restart [#13402](https://github.com/grafana/grafana/pull/13402) +* **Alerting**: Concurrent render limit for graphs used in notifications [#13401](https://github.com/grafana/grafana/pull/13401) * **Postgres/MySQL/MSSQL**: Add support for replacing $__interval and $__interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx 
[@svenklemm](https://github.com/svenklemm) # 5.3.0-beta1 (2018-09-06) @@ -318,7 +327,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4- * **Dashboard**: Sizing and positioning of settings menu icons [#11572](https://github.com/grafana/grafana/pull/11572) * **Dashboard**: Add search filter/tabs to new panel control [#10427](https://github.com/grafana/grafana/issues/10427) * **Folders**: User with org viewer role should not be able to save/move dashboards in/to general folder [#11553](https://github.com/grafana/grafana/issues/11553) -* **Influxdb**: Dont assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo) +* **Influxdb**: Don't assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo) ### Tech * Backend code simplification [#11613](https://github.com/grafana/grafana/pull/11613), thx [@knweiss](https://github.com/knweiss) @@ -505,7 +514,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4- # 4.6.2 (2017-11-16) ## Important -* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if your using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777) +* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if you're using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777) ## Fixes * **Color picker**: Bug after using textbox input field to change/paste color string [#9769](https://github.com/grafana/grafana/issues/9769) @@ -1464,7 +1473,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated **New features** - [Issue #1623](https://github.com/grafana/grafana/issues/1623). 
Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site -- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site +- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embed a single graph on another web site - [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes - [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views - [Issue #1241](https://github.com/grafana/grafana/issues/1242). 
Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data diff --git a/Gopkg.lock b/Gopkg.lock index bd247d691dd..041f784f770 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -19,6 +19,12 @@ packages = ["."] revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520" +[[projects]] + branch = "master" + name = "github.com/VividCortex/mysqlerr" + packages = ["."] + revision = "6c6b55f8796f578c870b7e19bafb16103bc40095" + [[projects]] name = "github.com/aws/aws-sdk-go" packages = [ @@ -673,6 +679,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "81a37e747b875cf870c1b9486fa3147e704dea7db8ba86f7cb942d3ddc01d3e3" + inputs-digest = "6e9458f912a5f0eb3430b968f1b4dbc4e3b7671b282cf4fe1573419a6d9ba0d4" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 6c91ec37221..c5b4b31cb32 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -203,3 +203,7 @@ ignored = [ [[constraint]] name = "github.com/denisenkom/go-mssqldb" revision = "270bc3860bb94dd3a3ffd047377d746c5e276726" + +[[constraint]] + name = "github.com/VividCortex/mysqlerr" + branch = "master" diff --git a/Gruntfile.js b/Gruntfile.js index 8a71fb44148..2d5990b5f58 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -25,7 +25,6 @@ module.exports = function (grunt) { } } - config.coverage = grunt.option('coverage'); config.phjs = grunt.option('phjsToRelease'); config.pkg.version = grunt.option('pkgVer') || config.pkg.version; diff --git a/build.go b/build.go index 561dd70df0e..69fbf3bada8 100644 --- a/build.go +++ b/build.go @@ -22,6 +22,11 @@ import ( "time" ) +const ( + windows = "windows" + linux = "linux" +) + var ( //versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`) goarch string @@ -110,17 +115,16 @@ func main() { case "package": grunt(gruntBuildArg("build")...) grunt(gruntBuildArg("package")...) 
- if goos == "linux" { + if goos == linux { createLinuxPackages() } case "package-only": grunt(gruntBuildArg("package")...) - if goos == "linux" { + if goos == linux { createLinuxPackages() } - case "pkg-rpm": grunt(gruntBuildArg("release")...) createRpmPackages() @@ -379,7 +383,7 @@ func ensureGoPath() { } func grunt(params ...string) { - if runtime.GOOS == "windows" { + if runtime.GOOS == windows { runPrint(`.\node_modules\.bin\grunt`, params...) } else { runPrint("./node_modules/.bin/grunt", params...) @@ -417,11 +421,11 @@ func test(pkg string) { func build(binaryName, pkg string, tags []string) { binary := fmt.Sprintf("./bin/%s-%s/%s", goos, goarch, binaryName) if isDev { - //dont include os and arch in output path in dev environment + //don't include os and arch in output path in dev environment binary = fmt.Sprintf("./bin/%s", binaryName) } - if goos == "windows" { + if goos == windows { binary += ".exe" } @@ -485,11 +489,11 @@ func clean() { func setBuildEnv() { os.Setenv("GOOS", goos) - if goos == "windows" { + if goos == windows { // require windows >=7 os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601") } - if goarch != "amd64" || goos != "linux" { + if goarch != "amd64" || goos != linux { // needed for all other archs cgo = true } diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index b2a839365ac..00000000000 --- a/codecov.yml +++ /dev/null @@ -1,11 +0,0 @@ -coverage: - precision: 2 - round: down - range: "50...100" - - status: - project: yes - patch: yes - changes: no - -comment: off diff --git a/conf/defaults.ini b/conf/defaults.ini index 15b8927e65a..eb8debc0094 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -474,6 +474,10 @@ error_or_timeout = alerting # Default setting for how Grafana handles nodata or null values in alerting. 
(alerting, no_data, keep_state, ok) nodata_or_nullvalues = no_data +# Alert notifications can include images, but rendering many images at the same time can overload the server +# This limit will protect the server from render overloading and make sure notifications are sent out quickly +concurrent_render_limit = 5 + #################################### Explore ############################# [explore] # Enable the Explore section diff --git a/conf/sample.ini b/conf/sample.ini index 2ef254f79b9..e6a03718d19 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -393,6 +393,10 @@ log_queries = # Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok) ;nodata_or_nullvalues = no_data +# Alert notifications can include images, but rendering many images at the same time can overload the server +# This limit will protect the server from render overloading and make sure notifications are sent out quickly +;concurrent_render_limit = 5 + #################################### Explore ############################# [explore] # Enable the Explore section @@ -431,7 +435,7 @@ log_queries = ;sampler_param = 1 #################################### Grafana.com integration ########################## -# Url used to to import dashboards directly from Grafana.com +# Url used to import dashboards directly from Grafana.com [grafana_com] ;url = https://grafana.com diff --git a/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml b/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml new file mode 100644 index 00000000000..1ede5dcd30a --- /dev/null +++ b/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +providers: + - name: 'Bulk alerting dashboards' + folder: 'Bulk alerting dashboards' + type: file + options: + path: devenv/bulk_alerting_dashboards + diff --git a/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet 
b/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet new file mode 100644 index 00000000000..a7acd57745d --- /dev/null +++ b/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet @@ -0,0 +1,168 @@ +{ + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 65 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A", + "5m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "10s", + "handler": 1, + "name": "bulk alerting", + "noDataState": "no_data", + "notifications": [] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-prometheus", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "$$hashKey": "object:117", + "expr": "go_goroutines", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 50 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + 
"label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New dashboard", + "uid": null, + "version": 0 +} \ No newline at end of file diff --git a/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json new file mode 100644 index 00000000000..08bf6dce9d0 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json @@ -0,0 +1,1166 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": "-- Grafana --", + "enable": true, + "hide": false, + "iconColor": "rgba(255, 96, 96, 1)", + "limit": 100, + "matchAny": false, + "name": "annotations", + "showIn": 0, + "tags": [ + "asd" + ], + "type": "tags" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 13, + "x": 0, + "y": 0 + }, + "id": 6, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + 
"steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 11, + "x": 13, + "y": 0 + }, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + 
"show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 7 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 7 + }, + "id": 18, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", 
+ "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 17, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + 
"alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 0, + "y": 14 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 8, + "y": 14 + }, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + 
"refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 16, + "y": 14 + }, + "id": 11, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + 
"bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 0, + "y": 19 + }, + "id": 14, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 8, + "y": 19 + }, + "id": 15, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + 
"stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 16, + "y": 19 + }, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": 
false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 6, + "w": 16, + "x": 0, + "y": 24 + }, + "id": 13, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 24 + }, + "id": 16, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + 
"timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Panel tests - Slow Queries & Annotations", + "uid": "xtY_uCAiz", + "version": 11 +} diff --git a/devenv/docker/blocks/apache_proxy/docker-compose.yaml b/devenv/docker/blocks/apache_proxy/docker-compose.yaml new file mode 100644 index 00000000000..3791213f05a --- /dev/null +++ b/devenv/docker/blocks/apache_proxy/docker-compose.yaml @@ -0,0 +1,9 @@ +# This will proxy all requests for http://localhost:10081/grafana/ to +# http://localhost:3000 (Grafana running locally) +# +# Please note that you'll need to change the root_url in the Grafana configuration: +# root_url = %(protocol)s://%(domain)s:10081/grafana/ + + apacheproxy: + build: docker/blocks/apache_proxy + network_mode: host diff --git a/devenv/docker/blocks/collectd/docker-compose.yaml b/devenv/docker/blocks/collectd/docker-compose.yaml new file mode 100644 index 00000000000..c5e189b58d8 --- /dev/null +++ b/devenv/docker/blocks/collectd/docker-compose.yaml @@ -0,0 +1,11 @@ + collectd: + build: docker/blocks/collectd + environment: + HOST_NAME: myserver + 
GRAPHITE_HOST: graphite + GRAPHITE_PORT: 2003 + GRAPHITE_PREFIX: collectd. + REPORT_BY_CPU: 'false' + COLLECT_INTERVAL: 10 + links: + - graphite diff --git a/devenv/docker/blocks/elastic/docker-compose.yaml b/devenv/docker/blocks/elastic/docker-compose.yaml new file mode 100644 index 00000000000..2eba60f38be --- /dev/null +++ b/devenv/docker/blocks/elastic/docker-compose.yaml @@ -0,0 +1,15 @@ + elasticsearch: + image: elasticsearch:2.4.1 + command: elasticsearch -Des.network.host=0.0.0.0 + ports: + - "9200:9200" + - "9300:9300" + volumes: + - ./blocks/elastic/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml + + fake-elastic-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: elasticsearch + FD_PORT: 9200 diff --git a/devenv/docker/blocks/elastic1/docker-compose.yaml b/devenv/docker/blocks/elastic1/docker-compose.yaml new file mode 100644 index 00000000000..518ae76e6ee --- /dev/null +++ b/devenv/docker/blocks/elastic1/docker-compose.yaml @@ -0,0 +1,8 @@ + elasticsearch1: + image: elasticsearch:1.7.6 + command: elasticsearch -Des.network.host=0.0.0.0 + ports: + - "11200:9200" + - "11300:9300" + volumes: + - ./blocks/elastic/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml diff --git a/devenv/docker/blocks/elastic5/docker-compose.yaml b/devenv/docker/blocks/elastic5/docker-compose.yaml new file mode 100644 index 00000000000..7148aa18c42 --- /dev/null +++ b/devenv/docker/blocks/elastic5/docker-compose.yaml @@ -0,0 +1,15 @@ +# You need to run 'sysctl -w vm.max_map_count=262144' on the host machine + + elasticsearch5: + image: elasticsearch:5 + command: elasticsearch + ports: + - "10200:9200" + - "10300:9300" + + fake-elastic5-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: elasticsearch + FD_PORT: 10200 diff --git a/devenv/docker/blocks/elastic6/docker-compose.yaml b/devenv/docker/blocks/elastic6/docker-compose.yaml new file mode 100644 index 
00000000000..dd2439f88e4 --- /dev/null +++ b/devenv/docker/blocks/elastic6/docker-compose.yaml @@ -0,0 +1,15 @@ +# You need to run 'sysctl -w vm.max_map_count=262144' on the host machine + + elasticsearch6: + image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.2.4 + command: elasticsearch + ports: + - "11200:9200" + - "11300:9300" + + fake-elastic6-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: elasticsearch6 + FD_PORT: 11200 diff --git a/devenv/docker/blocks/graphite/docker-compose.yaml b/devenv/docker/blocks/graphite/docker-compose.yaml new file mode 100644 index 00000000000..acebd2bd9c0 --- /dev/null +++ b/devenv/docker/blocks/graphite/docker-compose.yaml @@ -0,0 +1,16 @@ + graphite09: + build: docker/blocks/graphite + ports: + - "8080:80" + - "2003:2003" + volumes: + - /etc/localtime:/etc/localtime:ro + - /etc/timezone:/etc/timezone:ro + + fake-graphite-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: graphite + FD_PORT: 2003 + diff --git a/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf index c9520124a2a..792bbfd6857 100644 --- a/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf +++ b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf @@ -8,7 +8,7 @@ # 'avg'. The name of the aggregate metric will be derived from # 'output_template' filling in any captured fields from 'input_pattern'. # -# For example, if you're metric naming scheme is: +# For example, if your metric naming scheme is: # # .applications... 
# diff --git a/devenv/docker/blocks/graphite1/docker-compose.yaml b/devenv/docker/blocks/graphite1/docker-compose.yaml new file mode 100644 index 00000000000..1fa3e738ba8 --- /dev/null +++ b/devenv/docker/blocks/graphite1/docker-compose.yaml @@ -0,0 +1,21 @@ + graphite: + build: + context: docker/blocks/graphite1 + args: + version: master + ports: + - "8080:80" + - "2003:2003" + - "8125:8125/udp" + - "8126:8126" + volumes: + - /etc/localtime:/etc/localtime:ro + - /etc/timezone:/etc/timezone:ro + + fake-graphite-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: graphite + FD_PORT: 2003 + diff --git a/devenv/docker/blocks/graphite11/docker-compose.yaml b/devenv/docker/blocks/graphite11/docker-compose.yaml new file mode 100644 index 00000000000..4b0d837a619 --- /dev/null +++ b/devenv/docker/blocks/graphite11/docker-compose.yaml @@ -0,0 +1,18 @@ + graphite11: + image: graphiteapp/graphite-statsd + ports: + - "8180:80" + - "2103-2104:2003-2004" + - "2123-2124:2023-2024" + - "8225:8125/udp" + - "8226:8126" + + fake-graphite11-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: graphite + FD_PORT: 2103 + FD_GRAPHITE_VERSION: 1.1 + depends_on: + - graphite11 \ No newline at end of file diff --git a/devenv/docker/blocks/influxdb/docker-compose.yaml b/devenv/docker/blocks/influxdb/docker-compose.yaml new file mode 100644 index 00000000000..3434f5d09b9 --- /dev/null +++ b/devenv/docker/blocks/influxdb/docker-compose.yaml @@ -0,0 +1,17 @@ + influxdb: + image: influxdb:latest + container_name: influxdb + ports: + - "2004:2004" + - "8083:8083" + - "8086:8086" + volumes: + - ./blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf + + fake-influxdb-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: influxdb + FD_PORT: 8086 + diff --git a/devenv/docker/blocks/jaeger/docker-compose.yaml b/devenv/docker/blocks/jaeger/docker-compose.yaml new file mode 100644 
index 00000000000..2b57c863425 --- /dev/null +++ b/devenv/docker/blocks/jaeger/docker-compose.yaml @@ -0,0 +1,6 @@ + jaeger: + image: jaegertracing/all-in-one:latest + ports: + - "127.0.0.1:6831:6831/udp" + - "16686:16686" + diff --git a/devenv/docker/blocks/memcached/docker-compose.yaml b/devenv/docker/blocks/memcached/docker-compose.yaml new file mode 100644 index 00000000000..b3201da0f95 --- /dev/null +++ b/devenv/docker/blocks/memcached/docker-compose.yaml @@ -0,0 +1,5 @@ + memcached: + image: memcached:latest + ports: + - "11211:11211" + diff --git a/devenv/docker/blocks/mssql/docker-compose.yaml b/devenv/docker/blocks/mssql/docker-compose.yaml new file mode 100644 index 00000000000..05a93629e73 --- /dev/null +++ b/devenv/docker/blocks/mssql/docker-compose.yaml @@ -0,0 +1,19 @@ + mssql: + build: + context: docker/blocks/mssql/build + environment: + ACCEPT_EULA: Y + MSSQL_SA_PASSWORD: Password! + MSSQL_PID: Developer + MSSQL_DATABASE: grafana + MSSQL_USER: grafana + MSSQL_PASSWORD: Password! + ports: + - "1433:1433" + + fake-mssql-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: mssql + FD_PORT: 1433 \ No newline at end of file diff --git a/devenv/docker/blocks/mssql_tests/docker-compose.yaml b/devenv/docker/blocks/mssql_tests/docker-compose.yaml new file mode 100644 index 00000000000..eea4d1e3561 --- /dev/null +++ b/devenv/docker/blocks/mssql_tests/docker-compose.yaml @@ -0,0 +1,12 @@ + mssqltests: + build: + context: docker/blocks/mssql/build + environment: + ACCEPT_EULA: Y + MSSQL_SA_PASSWORD: Password! + MSSQL_PID: Express + MSSQL_DATABASE: grafanatest + MSSQL_USER: grafana + MSSQL_PASSWORD: Password! 
+ ports: + - "1433:1433" \ No newline at end of file diff --git a/devenv/docker/blocks/mysql/docker-compose.yaml b/devenv/docker/blocks/mysql/docker-compose.yaml new file mode 100644 index 00000000000..381b04a53c8 --- /dev/null +++ b/devenv/docker/blocks/mysql/docker-compose.yaml @@ -0,0 +1,18 @@ + mysql: + image: mysql:5.6 + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana + MYSQL_USER: grafana + MYSQL_PASSWORD: password + ports: + - "3306:3306" + command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --innodb_monitor_enable=all] + + fake-mysql-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: mysql + FD_PORT: 3306 + diff --git a/devenv/docker/blocks/mysql_opendata/docker-compose.yaml b/devenv/docker/blocks/mysql_opendata/docker-compose.yaml new file mode 100644 index 00000000000..4d478ee0860 --- /dev/null +++ b/devenv/docker/blocks/mysql_opendata/docker-compose.yaml @@ -0,0 +1,9 @@ + mysql_opendata: + build: docker/blocks/mysql_opendata + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: testdata + MYSQL_USER: grafana + MYSQL_PASSWORD: password + ports: + - "3307:3306" diff --git a/devenv/docker/blocks/mysql_tests/docker-compose.yaml b/devenv/docker/blocks/mysql_tests/docker-compose.yaml new file mode 100644 index 00000000000..a7509d47880 --- /dev/null +++ b/devenv/docker/blocks/mysql_tests/docker-compose.yaml @@ -0,0 +1,11 @@ + mysqltests: + build: + context: docker/blocks/mysql_tests + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana_tests + MYSQL_USER: grafana + MYSQL_PASSWORD: password + ports: + - "3306:3306" + tmpfs: /var/lib/mysql:rw diff --git a/devenv/docker/blocks/nginx_proxy/docker-compose.yaml b/devenv/docker/blocks/nginx_proxy/docker-compose.yaml new file mode 100644 index 00000000000..aefd7226f36 --- /dev/null +++ b/devenv/docker/blocks/nginx_proxy/docker-compose.yaml @@ -0,0 +1,9 @@ +# This will proxy all requests for 
http://localhost:10080/grafana/ to +# http://localhost:3000 (Grafana running locally) +# +# Please note that you'll need to change the root_url in the Grafana configuration: +# root_url = %(protocol)s://%(domain)s:10080/grafana/ + + nginxproxy: + build: docker/blocks/nginx_proxy + network_mode: host diff --git a/devenv/docker/blocks/openldap/docker-compose.yaml b/devenv/docker/blocks/openldap/docker-compose.yaml new file mode 100644 index 00000000000..d11858ccfb9 --- /dev/null +++ b/devenv/docker/blocks/openldap/docker-compose.yaml @@ -0,0 +1,10 @@ + openldap: + build: docker/blocks/openldap + environment: + SLAPD_PASSWORD: grafana + SLAPD_DOMAIN: grafana.org + SLAPD_ADDITIONAL_MODULES: memberof + ports: + - "389:389" + + diff --git a/devenv/docker/blocks/opentsdb/docker-compose.yaml b/devenv/docker/blocks/opentsdb/docker-compose.yaml new file mode 100644 index 00000000000..ee064bb107d --- /dev/null +++ b/devenv/docker/blocks/opentsdb/docker-compose.yaml @@ -0,0 +1,11 @@ + opentsdb: + image: opower/opentsdb:latest + ports: + - "4242:4242" + + fake-opentsdb-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: opentsdb + diff --git a/devenv/docker/blocks/postgres/docker-compose.yaml b/devenv/docker/blocks/postgres/docker-compose.yaml new file mode 100644 index 00000000000..27736042f7b --- /dev/null +++ b/devenv/docker/blocks/postgres/docker-compose.yaml @@ -0,0 +1,16 @@ + postgrestest: + image: postgres:9.3 + environment: + POSTGRES_USER: grafana + POSTGRES_PASSWORD: password + POSTGRES_DATABASE: grafana + ports: + - "5432:5432" + command: postgres -c log_connections=on -c logging_collector=on -c log_destination=stderr -c log_directory=/var/log/postgresql + + fake-postgres-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: postgres + FD_PORT: 5432 diff --git a/devenv/docker/blocks/postgres_tests/docker-compose.yaml b/devenv/docker/blocks/postgres_tests/docker-compose.yaml new file mode 
100644 index 00000000000..7e6da7d8517 --- /dev/null +++ b/devenv/docker/blocks/postgres_tests/docker-compose.yaml @@ -0,0 +1,9 @@ + postgrestest: + build: + context: docker/blocks/postgres_tests + environment: + POSTGRES_USER: grafanatest + POSTGRES_PASSWORD: grafanatest + ports: + - "5432:5432" + tmpfs: /var/lib/postgresql/data:rw \ No newline at end of file diff --git a/devenv/docker/blocks/prometheus/docker-compose.yaml b/devenv/docker/blocks/prometheus/docker-compose.yaml new file mode 100644 index 00000000000..db778060dde --- /dev/null +++ b/devenv/docker/blocks/prometheus/docker-compose.yaml @@ -0,0 +1,31 @@ + prometheus: + build: docker/blocks/prometheus + network_mode: host + ports: + - "9090:9090" + + node_exporter: + image: prom/node-exporter + network_mode: host + ports: + - "9100:9100" + + fake-prometheus-data: + image: grafana/fake-data-gen + network_mode: host + ports: + - "9091:9091" + environment: + FD_DATASOURCE: prom + + alertmanager: + image: quay.io/prometheus/alertmanager + network_mode: host + ports: + - "9093:9093" + + prometheus-random-data: + build: docker/blocks/prometheus_random_data + network_mode: host + ports: + - "8081:8080" diff --git a/devenv/docker/blocks/prometheus2/docker-compose.yaml b/devenv/docker/blocks/prometheus2/docker-compose.yaml new file mode 100644 index 00000000000..d586b4b5742 --- /dev/null +++ b/devenv/docker/blocks/prometheus2/docker-compose.yaml @@ -0,0 +1,31 @@ + prometheus: + build: docker/blocks/prometheus2 + network_mode: host + ports: + - "9090:9090" + + node_exporter: + image: prom/node-exporter + network_mode: host + ports: + - "9100:9100" + + fake-prometheus-data: + image: grafana/fake-data-gen + network_mode: host + ports: + - "9091:9091" + environment: + FD_DATASOURCE: prom + + alertmanager: + image: quay.io/prometheus/alertmanager + network_mode: host + ports: + - "9093:9093" + + prometheus-random-data: + build: docker/blocks/prometheus_random_data + network_mode: host + ports: + - "8081:8080" diff 
--git a/devenv/docker/blocks/prometheus_mac/docker-compose.yaml b/devenv/docker/blocks/prometheus_mac/docker-compose.yaml new file mode 100644 index 00000000000..b73d278fae2 --- /dev/null +++ b/devenv/docker/blocks/prometheus_mac/docker-compose.yaml @@ -0,0 +1,26 @@ + prometheus: + build: docker/blocks/prometheus_mac + ports: + - "9090:9090" + + node_exporter: + image: prom/node-exporter + ports: + - "9100:9100" + + fake-prometheus-data: + image: grafana/fake-data-gen + ports: + - "9091:9091" + environment: + FD_DATASOURCE: prom + + alertmanager: + image: quay.io/prometheus/alertmanager + ports: + - "9093:9093" + + prometheus-random-data: + build: docker/blocks/prometheus_random_data + ports: + - "8081:8080" diff --git a/devenv/docker/blocks/smtp/docker-compose.yaml b/devenv/docker/blocks/smtp/docker-compose.yaml new file mode 100644 index 00000000000..85d598b6167 --- /dev/null +++ b/devenv/docker/blocks/smtp/docker-compose.yaml @@ -0,0 +1,4 @@ + snmpd: + image: namshi/smtp + ports: + - "25:25" diff --git a/devenv/docker/ha_test/.gitignore b/devenv/docker/ha_test/.gitignore new file mode 100644 index 00000000000..0f4e139e204 --- /dev/null +++ b/devenv/docker/ha_test/.gitignore @@ -0,0 +1 @@ +grafana/provisioning/dashboards/alerts/alert-* \ No newline at end of file diff --git a/devenv/docker/ha_test/README.md b/devenv/docker/ha_test/README.md new file mode 100644 index 00000000000..bc93727ceae --- /dev/null +++ b/devenv/docker/ha_test/README.md @@ -0,0 +1,137 @@ +# Grafana High Availability (HA) test setup + +A set of docker compose services which together creates a Grafana HA test setup with capability of easily +scaling up/down number of Grafana instances. + +Included services + +* Grafana +* Mysql - Grafana configuration database and session storage +* Prometheus - Monitoring of Grafana and used as datasource of provisioned alert rules +* Nginx - Reverse proxy for Grafana and Prometheus. 
Enables browsing Grafana/Prometheus UI using a hostname + +## Prerequisites + +### Build grafana docker container + +Build a Grafana docker container from current branch and commit and tag it as grafana/grafana:dev. + +```bash +$ cd +$ make build-docker-full +``` + +### Virtual host names + +#### Alternative 1 - Use dnsmasq + +```bash +$ sudo apt-get install dnsmasq +$ echo 'address=/loc/127.0.0.1' | sudo tee /etc/dnsmasq.d/dnsmasq-loc.conf > /dev/null +$ sudo /etc/init.d/dnsmasq restart +$ ping whatever.loc +PING whatever.loc (127.0.0.1) 56(84) bytes of data. +64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.076 ms +--- whatever.loc ping statistics --- +1 packet transmitted, 1 received, 0% packet loss, time 1998ms +``` + +#### Alternative 2 - Manually update /etc/hosts + +Update your `/etc/hosts` to be able to access Grafana and/or Prometheus UI using a hostname. + +```bash +$ cat /etc/hosts +127.0.0.1 grafana.loc +127.0.0.1 prometheus.loc +``` + +## Start services + +```bash +$ docker-compose up -d +``` + +Browse +* http://grafana.loc/ +* http://prometheus.loc/ + +Check for any errors + +```bash +$ docker-compose logs | grep error +``` + +### Scale Grafana instances up/down + +Scale number of Grafana instances to `` + +```bash +$ docker-compose up --scale grafana= -d +# for example 3 instances +$ docker-compose up --scale grafana=3 -d +``` + +## Test alerting + +### Create notification channels + +Creates default notification channels, if not already exists + +```bash +$ ./alerts.sh setup +``` + +### Slack notifications + +Disable + +```bash +$ ./alerts.sh slack -d +``` + +Enable and configure url + +```bash +$ ./alerts.sh slack -u https://hooks.slack.com/services/... +``` + +Enable, configure url and enable reminders + +```bash +$ ./alerts.sh slack -u https://hooks.slack.com/services/... 
-r -e 10m +``` + +### Provision alert dashboards with alert rules + +Provision 1 dashboard/alert rule (default) + +```bash +$ ./alerts.sh provision +``` + +Provision 10 dashboards/alert rules + +```bash +$ ./alerts.sh provision -a 10 +``` + +Provision 10 dashboards/alert rules and change condition to `gt > 100` + +```bash +$ ./alerts.sh provision -a 10 -c 100 +``` + +### Pause/unpause all alert rules + +Pause + +```bash +$ ./alerts.sh pause +``` + +Unpause + +```bash +$ ./alerts.sh unpause +``` diff --git a/devenv/docker/ha_test/alerts.sh b/devenv/docker/ha_test/alerts.sh new file mode 100755 index 00000000000..a05a4581739 --- /dev/null +++ b/devenv/docker/ha_test/alerts.sh @@ -0,0 +1,156 @@ +#!/bin/bash + +requiresJsonnet() { + if ! type "jsonnet" > /dev/null; then + echo "you need you install jsonnet to run this script" + echo "follow the instructions on https://github.com/google/jsonnet" + exit 1 + fi +} + +setup() { + STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/1) + if [ $STATUS -eq 200 ]; then + echo "Email already exists, skipping..." + else + curl -H "Content-Type: application/json" \ + -d '{ + "name": "Email", + "type": "email", + "isDefault": false, + "sendReminder": false, + "uploadImage": true, + "settings": { + "addresses": "user@test.com" + } + }' \ + http://admin:admin@grafana.loc/api/alert-notifications + fi + + STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/2) + if [ $STATUS -eq 200 ]; then + echo "Slack already exists, skipping..." 
+ else + curl -H "Content-Type: application/json" \ + -d '{ + "name": "Slack", + "type": "slack", + "isDefault": false, + "sendReminder": false, + "uploadImage": true + }' \ + http://admin:admin@grafana.loc/api/alert-notifications + fi +} + +slack() { + enabled=true + url='' + remind=false + remindEvery='10m' + + while getopts ":e:u:dr" o; do + case "${o}" in + e) + remindEvery=${OPTARG} + ;; + u) + url=${OPTARG} + ;; + d) + enabled=false + ;; + r) + remind=true + ;; + esac + done + shift $((OPTIND-1)) + + curl -X PUT \ + -H "Content-Type: application/json" \ + -d '{ + "id": 2, + "name": "Slack", + "type": "slack", + "isDefault": '$enabled', + "sendReminder": '$remind', + "frequency": "'$remindEvery'", + "uploadImage": true, + "settings": { + "url": "'$url'" + } + }' \ + http://admin:admin@grafana.loc/api/alert-notifications/2 +} + +provision() { + alerts=1 + condition=65 + while getopts ":a:c:" o; do + case "${o}" in + a) + alerts=${OPTARG} + ;; + c) + condition=${OPTARG} + ;; + esac + done + shift $((OPTIND-1)) + + requiresJsonnet + + rm -rf grafana/provisioning/dashboards/alerts/alert-*.json + jsonnet -m grafana/provisioning/dashboards/alerts grafana/provisioning/alerts.jsonnet --ext-code alerts=$alerts --ext-code condition=$condition +} + +pause() { + curl -H "Content-Type: application/json" \ + -d '{"paused":true}' \ + http://admin:admin@grafana.loc/api/admin/pause-all-alerts +} + +unpause() { + curl -H "Content-Type: application/json" \ + -d '{"paused":false}' \ + http://admin:admin@grafana.loc/api/admin/pause-all-alerts +} + +usage() { + echo -e "Usage: ./alerts.sh COMMAND [OPTIONS]\n" + echo -e "Commands" + echo -e " setup\t\t creates default alert notification channels" + echo -e " slack\t\t configure slack notification channel" + echo -e " [-d]\t\t\t disable notifier, default enabled" + echo -e " [-u]\t\t\t url" + echo -e " [-r]\t\t\t send reminders" + echo -e " [-e ]\t\t default 10m\n" + echo -e " provision\t provision alerts" + echo -e " [-a ]\t default 
1" + echo -e " [-c ]\t default 65\n" + echo -e " pause\t\t pause all alerts" + echo -e " unpause\t unpause all alerts" +} + +main() { + local cmd=$1 + + if [[ $cmd == "setup" ]]; then + setup + elif [[ $cmd == "slack" ]]; then + slack "${@:2}" + elif [[ $cmd == "provision" ]]; then + provision "${@:2}" + elif [[ $cmd == "pause" ]]; then + pause + elif [[ $cmd == "unpause" ]]; then + unpause + fi + + if [[ -z "$cmd" ]]; then + usage + fi +} + +main "$@" diff --git a/devenv/docker/ha_test/docker-compose.yaml b/devenv/docker/ha_test/docker-compose.yaml new file mode 100644 index 00000000000..ce8630d88a4 --- /dev/null +++ b/devenv/docker/ha_test/docker-compose.yaml @@ -0,0 +1,78 @@ +version: "2.1" + +services: + nginx-proxy: + image: jwilder/nginx-proxy + ports: + - "80:80" + volumes: + - /var/run/docker.sock:/tmp/docker.sock:ro + + db: + image: mysql + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana + MYSQL_USER: grafana + MYSQL_PASSWORD: password + ports: + - 3306 + healthcheck: + test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] + timeout: 10s + retries: 10 + + # db: + # image: postgres:9.3 + # environment: + # POSTGRES_DATABASE: grafana + # POSTGRES_USER: grafana + # POSTGRES_PASSWORD: password + # ports: + # - 5432 + # healthcheck: + # test: ["CMD-SHELL", "pg_isready -d grafana -U grafana"] + # timeout: 10s + # retries: 10 + + grafana: + image: grafana/grafana:dev + volumes: + - ./grafana/provisioning/:/etc/grafana/provisioning/ + environment: + - VIRTUAL_HOST=grafana.loc + - GF_SERVER_ROOT_URL=http://grafana.loc + - GF_DATABASE_NAME=grafana + - GF_DATABASE_USER=grafana + - GF_DATABASE_PASSWORD=password + - GF_DATABASE_TYPE=mysql + - GF_DATABASE_HOST=db:3306 + - GF_SESSION_PROVIDER=mysql + - GF_SESSION_PROVIDER_CONFIG=grafana:password@tcp(db:3306)/grafana?allowNativePasswords=true + # - GF_DATABASE_TYPE=postgres + # - GF_DATABASE_HOST=db:5432 + # - GF_DATABASE_SSL_MODE=disable + # - GF_SESSION_PROVIDER=postgres + # - 
GF_SESSION_PROVIDER_CONFIG=user=grafana password=password host=db port=5432 dbname=grafana sslmode=disable + - GF_LOG_FILTERS=alerting.notifier:debug,alerting.notifier.slack:debug + ports: + - 3000 + depends_on: + db: + condition: service_healthy + + prometheus: + image: prom/prometheus:v2.4.2 + volumes: + - ./prometheus/:/etc/prometheus/ + environment: + - VIRTUAL_HOST=prometheus.loc + ports: + - 9090 + + # mysqld-exporter: + # image: prom/mysqld-exporter + # environment: + # - DATA_SOURCE_NAME=grafana:password@(mysql:3306)/ + # ports: + # - 9104 diff --git a/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet new file mode 100644 index 00000000000..86ded7e79d6 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet @@ -0,0 +1,202 @@ +local numAlerts = std.extVar('alerts'); +local condition = std.extVar('condition'); +local arr = std.range(1, numAlerts); + +local alertDashboardTemplate = { + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 65 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A", + "5m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "10s", + "handler": 1, + "name": "bulk alerting", + "noDataState": "no_data", + "notifications": [ + { + "id": 2 + } + ] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": 
false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "$$hashKey": "object:117", + "expr": "go_goroutines", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 50 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New dashboard", + "uid": null, + "version": 0 +}; + + +{ + ['alert-' + std.toString(x) + '.json']: + alertDashboardTemplate + { + panels: [ + alertDashboardTemplate.panels[0] + + { + alert+: { + name: 'Alert rule ' + x, + conditions: [ + alertDashboardTemplate.panels[0].alert.conditions[0] + + { + evaluator+: { + params: [condition] + } + }, + ], + }, + }, + ], + uid: 'alert-' + x, + title: 'Alert ' + x + }, + for x in arr +} \ No newline at end of file diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml new file mode 100644 index 00000000000..60b6cd4bb04 --- /dev/null +++ 
b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml @@ -0,0 +1,8 @@ +apiVersion: 1 + +providers: + - name: 'Alerts' + folder: 'Alerts' + type: file + options: + path: /etc/grafana/provisioning/dashboards/alerts diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json new file mode 100644 index 00000000000..53e33c37b1f --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json @@ -0,0 +1,172 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": { + "Active alerts": "#bf1b00" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fill": 1, + "gridPos": { + "h": 12, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 2, + "interval": "", + "legend": { + "alignAsTable": true, + "avg": false, + "current": true, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "Active grafana instances", + "dashes": true, + "fill": 0 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(increase(grafana_alerting_notification_sent_total[1m])) by(job)", + "format": "time_series", + "instant": false, + "interval": "1m", + "intervalFactor": 1, + "legendFormat": "Notifications sent", + "refId": "A" + }, + { + "expr": "min(grafana_alerting_active_alerts) without(instance)", + "format": "time_series", + "interval": "1m", 
+ "intervalFactor": 1, + "legendFormat": "Active alerts", + "refId": "B" + }, + { + "expr": "count(up{job=\"grafana\"})", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Active grafana instances", + "refId": "C" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Notifications sent vs active alerts", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": 3 + } + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Overview", + "uid": "xHy7-hAik", + "version": 6 +} \ No newline at end of file diff --git a/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml new file mode 100644 index 00000000000..8d59793be16 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml @@ -0,0 +1,11 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + jsonData: + timeInterval: 10s + queryTimeout: 30s + httpMethod: POST \ No newline at end of file diff --git a/devenv/docker/ha_test/prometheus/prometheus.yml b/devenv/docker/ha_test/prometheus/prometheus.yml new file mode 
100644 index 00000000000..ea97ba8ba05 --- /dev/null +++ b/devenv/docker/ha_test/prometheus/prometheus.yml @@ -0,0 +1,39 @@ +# my global config +global: + scrape_interval: 10s # By default, scrape targets every 15 seconds. + evaluation_interval: 10s # By default, scrape targets every 15 seconds. + # scrape_timeout is set to the global default (10s). + +# Load and evaluate rules in this file every 'evaluation_interval' seconds. +#rule_files: +# - "alert.rules" +# - "first.rules" +# - "second.rules" + +# alerting: +# alertmanagers: +# - scheme: http +# static_configs: +# - targets: +# - "127.0.0.1:9093" + +scrape_configs: + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + - job_name: 'grafana' + dns_sd_configs: + - names: + - 'grafana' + type: 'A' + port: 3000 + refresh_interval: 10s + + # - job_name: 'mysql' + # dns_sd_configs: + # - names: + # - 'mysqld-exporter' + # type: 'A' + # port: 9104 + # refresh_interval: 10s \ No newline at end of file diff --git a/devenv/setup.sh b/devenv/setup.sh index cc71ecc71bf..c9cc0d47a6f 100755 --- a/devenv/setup.sh +++ b/devenv/setup.sh @@ -11,7 +11,21 @@ bulkDashboard() { let COUNTER=COUNTER+1 done - ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml + ln -s -f ../../../devenv/bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml +} + +bulkAlertingDashboard() { + + requiresJsonnet + + COUNTER=0 + MAX=100 + while [ $COUNTER -lt $MAX ]; do + jsonnet -o "bulk_alerting_dashboards/alerting_dashboard${COUNTER}.json" -e "local bulkDash = import 'bulk_alerting_dashboards/bulkdash_alerting.jsonnet'; bulkDash + { uid: 'bd-${COUNTER}', title: 'alerting-title-${COUNTER}' }" + let COUNTER=COUNTER+1 + done + + ln -s -f ../../../devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml } requiresJsonnet() { @@ -36,8 +50,9 @@ devDatasources() { usage() { echo -e "\n" echo "Usage:" - echo " 
bulk-dashboards - create and provisioning 400 dashboards" - echo " no args - provisiong core datasources and dev dashboards" + echo " bulk-dashboards - create and provisioning 400 dashboards" + echo " bulk-alerting-dashboards - create and provisioning 400 dashboards with alerts" + echo " no args - provisiong core datasources and dev dashboards" } main() { @@ -48,7 +63,9 @@ main() { local cmd=$1 - if [[ $cmd == "bulk-dashboards" ]]; then + if [[ $cmd == "bulk-alerting-dashboards" ]]; then + bulkAlertingDashboard + elif [[ $cmd == "bulk-dashboards" ]]; then bulkDashboard else devDashboards diff --git a/docs/README.md b/docs/README.md index ff5ef6a4131..7310f184a60 100644 --- a/docs/README.md +++ b/docs/README.md @@ -65,7 +65,7 @@ make docs-build This will rebuild the docs docker container. -To be able to use the image your have to quit (CTRL-C) the `make watch` command (that you run in the same directory as this README). Then simply rerun `make watch`, it will restart the docs server but now with access to your image. +To be able to use the image you have to quit (CTRL-C) the `make watch` command (that you run in the same directory as this README). Then simply rerun `make watch`, it will restart the docs server but now with access to your image. 
### Editing content diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index a026d1ec0cd..16d425d289a 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -200,7 +200,7 @@ providers: folder: '' type: file disableDeletion: false - updateIntervalSeconds: 3 #how often Grafana will scan for changed dashboards + updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards options: path: /var/lib/grafana/dashboards ``` diff --git a/docs/sources/auth/generic-oauth.md b/docs/sources/auth/generic-oauth.md index 0f8c2bd6856..5bb5c4cd753 100644 --- a/docs/sources/auth/generic-oauth.md +++ b/docs/sources/auth/generic-oauth.md @@ -174,6 +174,8 @@ allowed_organizations = allowed_organizations = ``` +> Note: It's important to ensure that the [root_url](/installation/configuration/#root-url) in Grafana is set in your Azure Application Reply URLs (App -> Settings -> Reply URLs) + ## Set up OAuth2 with Centrify 1. Create a new Custom OpenID Connect application configuration in the Centrify dashboard. diff --git a/docs/sources/auth/ldap.md b/docs/sources/auth/ldap.md index 82db8214fb7..4a884a60d15 100644 --- a/docs/sources/auth/ldap.md +++ b/docs/sources/auth/ldap.md @@ -181,6 +181,7 @@ group_search_filter = "(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU group_search_filter = "(|(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])(member:1.2.840.113556.1.4.1941:=CN=%s,[another user container/OU]))" group_search_filter_user_attribute = "cn" ``` +For more information on AD searches see [Microsoft's Search Filter Syntax](https://docs.microsoft.com/en-us/windows/desktop/adsi/search-filter-syntax) documentation. For troubleshooting, by changing `member_of` in `[servers.attributes]` to "dn" it will show you more accurate group memberships when [debug is enabled](#troubleshooting). 
diff --git a/docs/sources/auth/overview.md b/docs/sources/auth/overview.md index 3a38ed83988..20010a9ac09 100644 --- a/docs/sources/auth/overview.md +++ b/docs/sources/auth/overview.md @@ -32,11 +32,11 @@ permissions and org memberships. ## Grafana Auth -Grafana of course has a built in user authentication system with password authenticaten enabled by default. You can +Grafana of course has a built in user authentication system with password authentication enabled by default. You can disable authentication by enabling anonymous access. You can also hide login form and only allow login through an auth provider (listed above). There is also options for allowing self sign up. -### Anonymous authenticaten +### Anonymous authentication You can make Grafana accessible without any login required by enabling anonymous access in the configuration file. @@ -84,4 +84,3 @@ Set to the option detailed below to true to hide sign-out menu link. Useful if y [auth] disable_signout_menu = true ``` - diff --git a/docs/sources/features/datasources/stackdriver.md b/docs/sources/features/datasources/stackdriver.md new file mode 100644 index 00000000000..96f3ba3382e --- /dev/null +++ b/docs/sources/features/datasources/stackdriver.md @@ -0,0 +1,171 @@ ++++ +title = "Using Stackdriver in Grafana" +description = "Guide for using Stackdriver in Grafana" +keywords = ["grafana", "stackdriver", "google", "guide"] +type = "docs" +aliases = ["/datasources/stackdriver"] +[menu.docs] +name = "Stackdriver" +parent = "datasources" +weight = 11 ++++ + +# Using Google Stackdriver in Grafana + +> Only available in Grafana v5.3+. +> The datasource is currently a beta feature and is subject to change. + +Grafana ships with built-in support for Google Stackdriver. Just add it as a datasource and you are ready to build dashboards for your Stackdriver metrics. + +## Adding the data source to Grafana + +1. Open the side menu by clicking the Grafana icon in the top header. +2. 
In the side menu under the `Dashboards` link you should find a link named `Data Sources`. +3. Click the `+ Add data source` button in the top header. +4. Select `Stackdriver` from the *Type* dropdown. +5. Upload or paste in the Service Account Key file. See below for steps on how to create a Service Account Key file. + +> NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization. + +| Name | Description | +| --------------------- | ----------------------------------------------------------------------------------- | +| _Name_ | The datasource name. This is how you refer to the datasource in panels & queries. | +| _Default_ | Default datasource means that it will be pre-selected for new panels. | +| _Service Account Key_ | Service Account Key File for a GCP Project. Instructions below on how to create it. | + +## Authentication + +### Service Account Credentials - Private Key File + +To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to visualize data from multiple GCP Projects then you need to create one datasource per GCP Project. + +#### Enable APIs + +The following APIs need to be enabled first: + +- [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com) +- [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com) + +Click on the links above and click the `Enable` button: + +![Enable GCP APIs](/img/docs/v54/stackdriver_enable_api.png) + +#### Create a GCP Service Account for a Project + +1. Navigate to the [APIs & Services Credentials page](https://console.cloud.google.com/apis/credentials). +2. Click on the `Create credentials` dropdown/button and choose the `Service account key` option. 
+ + ![Create service account button](/img/docs/v54/stackdriver_create_service_account_button.png) +3. On the `Create service account key` page, choose key type `JSON`. Then in the `Service Account` dropdown, choose the `New service account` option: + + ![Create service account key](/img/docs/v54/stackdriver_create_service_account_key.png) +4. Some new fields will appear. Fill in a name for the service account in the `Service account name` field and then choose the `Monitoring Viewer` role from the `Role` dropdown: + + ![Choose role](/img/docs/v54/stackdriver_service_account_choose_role.png) +5. Click the Create button. A JSON key file will be created and downloaded to your computer. Store this file in a secure place as it allows access to your Stackdriver data. +6. Upload it to Grafana on the datasource Configuration page. You can either upload the file or paste in the contents of the file. + + ![Choose role](/img/docs/v54/stackdriver_grafana_upload_key.png) +7. The file contents will be encrypted and saved in the Grafana database. Don't forget to save after uploading the file! + + ![Choose role](/img/docs/v54/stackdriver_grafana_key_uploaded.png) + +## Metric Query Editor + +Choose a metric from the `Metric` dropdown. + +To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1` + +### Aggregation + +The aggregation field lets you combine time series based on common statistics. Read more about this option [here](https://cloud.google.com/monitoring/charts/metrics-selector#aggregation-options). + +The `Aligner` field allows you to align multiple time series after the same group by time interval. Read more about how it works [here](https://cloud.google.com/monitoring/charts/metrics-selector#alignment). + +#### Alignment Period/Group by Time + +The `Alignment Period` groups a metric by time if an aggregation is chosen. 
The default is to use the GCP Stackdriver default groupings (which allows you to compare graphs in Grafana with graphs in the Stackdriver UI). +The option is called `Stackdriver auto` and the defaults are: + +- 1m for time ranges < 23 hours +- 5m for time ranges >= 23 hours and < 6 days +- 1h for time ranges >= 6 days + +The other automatic option is `Grafana auto`. This will automatically set the group by time depending on the time range chosen and the width of the graph panel. Read more about the details [here](http://docs.grafana.org/reference/templating/#the-interval-variable). + +It is also possible to choose fixed time intervals to group by, like `1h` or `1d`. + +### Group By + +Group by resource or metric labels to reduce the number of time series and to aggregate the results by a group by. E.g. Group by instance_name to see an aggregated metric for a Compute instance. + +### Alias Patterns + +The Alias By field allows you to control the format of the legend keys. The default is to show the metric name and labels. This can be long and hard to read. Using the following patterns in the alias field, you can format the legend key the way you want it. + +#### Metric Type Patterns + +Alias Pattern | Description | Example Result +----------------- | ---------------------------- | ------------- +`{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization` +`{{metric.name}}` | returns the metric name part | `instance/cpu/utilization` +`{{metric.service}}` | returns the service part | `compute` + +#### Label Patterns + +In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns. 
+ +Alias Pattern Format | Description | Alias Pattern Example | Example Result +---------------------- | ---------------------------------- | ---------------------------- | ------------- +`{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod` +`{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b` + +Example Alias By: `{{metric.type}} - {{metric.labels.instance_name}}` + +Example Result: `compute.googleapis.com/instance/cpu/usage_time - server1-prod` + +## Templating + +Instead of hard-coding things like server, application and sensor name in you metric queries you can use variables in their place. +Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns makes it easy to change the data +being displayed in your dashboard. + +Checkout the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different +types of template variables. + +### Query Variable + +Writing variable queries is not supported yet. + +### Using variables in queries + +There are two syntaxes: + +- `$` Example: rate(http_requests_total{job=~"$job"}[5m]) +- `[[varname]]` Example: rate(http_requests_total{job=~"[[job]]"}[5m]) + +Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the *Multi-value* or *Include all value* options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`. + +## Annotations + +[Annotations]({{< relref "reference/annotations.md" >}}) allows you to overlay rich event information on top of graphs. You add annotation +queries via the Dashboard menu / Annotations view. + +## Configure the Datasource with Provisioning + +It's now possible to configure datasources using config files with Grafana's provisioning system. 
You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources) + +Here is a provisioning example for this datasource. + +```yaml +apiVersion: 1 + +datasources: + - name: Stackdriver + type: stackdriver + jsonData: + tokenUri: https://oauth2.googleapis.com/token + clientEmail: stackdriver@myproject.iam.gserviceaccount.com + secureJsonData: + privateKey: "" +``` diff --git a/docs/sources/guides/whats-new-in-v4-2.md b/docs/sources/guides/whats-new-in-v4-2.md index e976ed24700..e36e762bb76 100644 --- a/docs/sources/guides/whats-new-in-v4-2.md +++ b/docs/sources/guides/whats-new-in-v4-2.md @@ -67,7 +67,7 @@ Making it possible to have users in multiple groups and have detailed access con ## Upgrade & Breaking changes -If your using https in grafana we now force you to use tls 1.2 and the most secure ciphers. +If you're using https in grafana we now force you to use tls 1.2 and the most secure ciphers. We think its better to be secure by default rather then making it configurable. If you want to run https with lower versions of tls we suggest you put a reserve proxy in front of grafana. diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index 2bf4789257d..8d156e739bf 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -127,10 +127,13 @@ Another way is put a webserver like Nginx or Apache in front of Grafana and have ### protocol -`http` or `https` +`http`,`https` or `socket` > **Note** Grafana versions earlier than 3.0 are vulnerable to [POODLE](https://en.wikipedia.org/wiki/POODLE). So we strongly recommend to upgrade to 3.x or use a reverse proxy for ssl termination. +### socket +Path where the socket should be created when `protocol=socket`. Please make sure that Grafana has appropriate permissions. 
+ ### domain This setting is only used in as a part of the `root_url` setting (see below). Important if you @@ -566,3 +569,11 @@ Default setting for new alert rules. Defaults to categorize error and timeouts a > Available in 5.3 and above Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok) + +# concurrent_render_limit + +> Available in 5.3 and above + +Alert notifications can include images, but rendering many images at the same time can overload the server. +This limit will protect the server from render overloading and make sure notifications are sent out quickly. Default +value is `5`. diff --git a/docs/sources/tutorials/ha_setup.md b/docs/sources/tutorials/ha_setup.md index 0f138b20a17..5fdb091a348 100644 --- a/docs/sources/tutorials/ha_setup.md +++ b/docs/sources/tutorials/ha_setup.md @@ -22,7 +22,7 @@ Setting up Grafana for high availability is fairly simple. It comes down to two First, you need to do is to setup MySQL or Postgres on another server and configure Grafana to use that database. You can find the configuration for doing that in the [[database]]({{< relref "configuration.md" >}}#database) section in the grafana config. -Grafana will now persist all long term data in the database. How to configure the database for high availability is out of scope for this guide. We recommend finding an expert on for the database your using. +Grafana will now persist all long term data in the database. How to configure the database for high availability is out of scope for this guide. We recommend finding an expert on for the database you're using. 
## User sessions diff --git a/docs/versions.json b/docs/versions.json index caefbe198d6..34e9c2150e1 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,4 +1,5 @@ [ + { "version": "v5.3", "path": "/v5.3", "archived": false, "current": false }, { "version": "v5.2", "path": "/", "archived": false, "current": true }, { "version": "v5.1", "path": "/v5.1", "archived": true }, { "version": "v5.0", "path": "/v5.0", "archived": true }, diff --git a/package.json b/package.json index f4846f9fc51..5577579e0e5 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "5.3.0-pre1", + "version": "5.4.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" @@ -12,7 +12,7 @@ "devDependencies": { "@types/d3": "^4.10.1", "@types/enzyme": "^3.1.13", - "@types/jest": "^21.1.4", + "@types/jest": "^23.3.2", "@types/node": "^8.0.31", "@types/react": "^16.4.14", "@types/react-custom-scrollbars": "^4.0.5", @@ -100,7 +100,6 @@ "watch": "webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js", "build": "grunt build", "test": "grunt test", - "test:coverage": "grunt test --coverage=true", "lint": "tslint -c tslint.json --project tsconfig.json", "jest": "jest --notify --watch", "api-tests": "jest --notify --watch --config=tests/api/jest.js", diff --git a/pkg/api/avatar/avatar.go b/pkg/api/avatar/avatar.go index 5becf90ca35..6cf164285bf 100644 --- a/pkg/api/avatar/avatar.go +++ b/pkg/api/avatar/avatar.go @@ -97,15 +97,6 @@ type CacheServer struct { cache *gocache.Cache } -func (this *CacheServer) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) { - for _, k := range keys { - if _, err := fmt.Sscanf(r.FormValue(k), "%d", &v); err == nil { - defaultValue = v - } - } - return defaultValue -} - func (this *CacheServer) Handler(ctx *macaron.Context) { urlPath := ctx.Req.URL.Path hash := urlPath[strings.LastIndex(urlPath, "/")+1:] diff 
--git a/pkg/api/dashboard.go b/pkg/api/dashboard.go index c2ab6dd9a1a..d65598f6e5e 100644 --- a/pkg/api/dashboard.go +++ b/pkg/api/dashboard.go @@ -22,6 +22,10 @@ import ( "github.com/grafana/grafana/pkg/util" ) +const ( + anonString = "Anonymous" +) + func isDashboardStarredByUser(c *m.ReqContext, dashID int64) (bool, error) { if !c.IsSignedIn { return false, nil @@ -64,7 +68,7 @@ func GetDashboard(c *m.ReqContext) Response { } // Finding creator and last updater of the dashboard - updater, creator := "Anonymous", "Anonymous" + updater, creator := anonString, anonString if dash.UpdatedBy > 0 { updater = getUserLogin(dash.UpdatedBy) } @@ -128,7 +132,7 @@ func getUserLogin(userID int64) string { query := m.GetUserByIdQuery{Id: userID} err := bus.Dispatch(&query) if err != nil { - return "Anonymous" + return anonString } return query.Result.Login } @@ -403,7 +407,7 @@ func GetDashboardVersion(c *m.ReqContext) Response { return Error(500, fmt.Sprintf("Dashboard version %d not found for dashboardId %d", query.Version, dashID), err) } - creator := "Anonymous" + creator := anonString if query.Result.CreatedBy > 0 { creator = getUserLogin(query.Result.CreatedBy) } diff --git a/pkg/api/dtos/alerting_test.go b/pkg/api/dtos/alerting_test.go index c38f281be9c..f4c09f202cb 100644 --- a/pkg/api/dtos/alerting_test.go +++ b/pkg/api/dtos/alerting_test.go @@ -29,7 +29,7 @@ func TestFormatShort(t *testing.T) { } if parsed != tc.interval { - t.Errorf("expectes the parsed duration to equal the interval. Got %v expected: %v", parsed, tc.interval) + t.Errorf("expects the parsed duration to equal the interval. 
Got %v expected: %v", parsed, tc.interval) } } } diff --git a/pkg/api/folder.go b/pkg/api/folder.go index f0cdff24d20..0e08343b556 100644 --- a/pkg/api/folder.go +++ b/pkg/api/folder.go @@ -95,7 +95,7 @@ func toFolderDto(g guardian.DashboardGuardian, folder *m.Folder) dtos.Folder { canAdmin, _ := g.CanAdmin() // Finding creator and last updater of the folder - updater, creator := "Anonymous", "Anonymous" + updater, creator := anonString, anonString if folder.CreatedBy > 0 { creator = getUserLogin(folder.CreatedBy) } diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go index 6e24e432535..880de338c8f 100644 --- a/pkg/api/folder_test.go +++ b/pkg/api/folder_test.go @@ -133,16 +133,6 @@ func TestFoldersApiEndpoint(t *testing.T) { }) } -func callGetFolderByUID(sc *scenarioContext) { - sc.handlerFunc = GetFolderByUID - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() -} - -func callDeleteFolder(sc *scenarioContext) { - sc.handlerFunc = DeleteFolder - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() -} - func callCreateFolder(sc *scenarioContext) { sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() } diff --git a/pkg/api/index.go b/pkg/api/index.go index b8101a01fc8..1b73acd8829 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -11,6 +11,12 @@ import ( "github.com/grafana/grafana/pkg/setting" ) +const ( + // Themes + lightName = "light" + darkName = "dark" +) + func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { settings, err := getFrontendSettingsMap(c) if err != nil { @@ -60,7 +66,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { OrgRole: c.OrgRole, GravatarUrl: dtos.GetGravatarUrl(c.Email), IsGrafanaAdmin: c.IsGrafanaAdmin, - LightTheme: prefs.Theme == "light", + LightTheme: prefs.Theme == lightName, Timezone: prefs.Timezone, Locale: locale, HelpFlags1: c.HelpFlags1, @@ -88,12 +94,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { } 
themeURLParam := c.Query("theme") - if themeURLParam == "light" { + if themeURLParam == lightName { data.User.LightTheme = true - data.Theme = "light" - } else if themeURLParam == "dark" { + data.Theme = lightName + } else if themeURLParam == darkName { data.User.LightTheme = false - data.Theme = "dark" + data.Theme = darkName } if hasEditPermissionInFoldersQuery.Result { diff --git a/pkg/api/live/hub.go b/pkg/api/live/hub.go index 37ab5667e55..9708bc515d1 100644 --- a/pkg/api/live/hub.go +++ b/pkg/api/live/hub.go @@ -37,9 +37,6 @@ func newHub() *hub { } } -func (h *hub) removeConnection() { -} - func (h *hub) run(ctx context.Context) { for { select { diff --git a/pkg/api/pluginproxy/access_token_provider.go b/pkg/api/pluginproxy/access_token_provider.go new file mode 100644 index 00000000000..22407823ff9 --- /dev/null +++ b/pkg/api/pluginproxy/access_token_provider.go @@ -0,0 +1,171 @@ +package pluginproxy + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "sync" + "time" + + "golang.org/x/oauth2" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "golang.org/x/oauth2/jwt" +) + +var ( + tokenCache = tokenCacheType{ + cache: map[string]*jwtToken{}, + } + oauthJwtTokenCache = oauthJwtTokenCacheType{ + cache: map[string]*oauth2.Token{}, + } +) + +type tokenCacheType struct { + cache map[string]*jwtToken + sync.Mutex +} + +type oauthJwtTokenCacheType struct { + cache map[string]*oauth2.Token + sync.Mutex +} + +type accessTokenProvider struct { + route *plugins.AppPluginRoute + datasourceId int64 + datasourceVersion int +} + +type jwtToken struct { + ExpiresOn time.Time `json:"-"` + ExpiresOnString string `json:"expires_on"` + AccessToken string `json:"access_token"` +} + +func newAccessTokenProvider(ds *models.DataSource, pluginRoute *plugins.AppPluginRoute) *accessTokenProvider { + return &accessTokenProvider{ + datasourceId: ds.Id, + datasourceVersion: ds.Version, + route: 
pluginRoute, + } +} + +func (provider *accessTokenProvider) getAccessToken(data templateData) (string, error) { + tokenCache.Lock() + defer tokenCache.Unlock() + if cachedToken, found := tokenCache.cache[provider.getAccessTokenCacheKey()]; found { + if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { + logger.Info("Using token from cache") + return cachedToken.AccessToken, nil + } + } + + urlInterpolated, err := interpolateString(provider.route.TokenAuth.Url, data) + if err != nil { + return "", err + } + + params := make(url.Values) + for key, value := range provider.route.TokenAuth.Params { + interpolatedParam, err := interpolateString(value, data) + if err != nil { + return "", err + } + params.Add(key, interpolatedParam) + } + + getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode())) + getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") + getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode()))) + + resp, err := client.Do(getTokenReq) + if err != nil { + return "", err + } + + defer resp.Body.Close() + + var token jwtToken + if err := json.NewDecoder(resp.Body).Decode(&token); err != nil { + return "", err + } + + expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) + token.ExpiresOn = time.Unix(expiresOnEpoch, 0) + tokenCache.cache[provider.getAccessTokenCacheKey()] = &token + + logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) + + return token.AccessToken, nil +} + +func (provider *accessTokenProvider) getJwtAccessToken(ctx context.Context, data templateData) (string, error) { + oauthJwtTokenCache.Lock() + defer oauthJwtTokenCache.Unlock() + if cachedToken, found := oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()]; found { + if cachedToken.Expiry.After(time.Now().Add(time.Second * 10)) { + logger.Debug("Using token from cache") + return cachedToken.AccessToken, nil + } + } + + conf := &jwt.Config{} + + if val, ok := 
provider.route.JwtTokenAuth.Params["client_email"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.Email = interpolatedVal + } + + if val, ok := provider.route.JwtTokenAuth.Params["private_key"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.PrivateKey = []byte(interpolatedVal) + } + + if val, ok := provider.route.JwtTokenAuth.Params["token_uri"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.TokenURL = interpolatedVal + } + + conf.Scopes = provider.route.JwtTokenAuth.Scopes + + token, err := getTokenSource(conf, ctx) + if err != nil { + return "", err + } + + oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()] = token + + logger.Info("Got new access token", "ExpiresOn", token.Expiry) + + return token.AccessToken, nil +} + +var getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + tokenSrc := conf.TokenSource(ctx) + token, err := tokenSrc.Token() + if err != nil { + return nil, err + } + + return token, nil +} + +func (provider *accessTokenProvider) getAccessTokenCacheKey() string { + return fmt.Sprintf("%v_%v_%v_%v", provider.datasourceId, provider.datasourceVersion, provider.route.Path, provider.route.Method) +} diff --git a/pkg/api/pluginproxy/access_token_provider_test.go b/pkg/api/pluginproxy/access_token_provider_test.go new file mode 100644 index 00000000000..e75748e4660 --- /dev/null +++ b/pkg/api/pluginproxy/access_token_provider_test.go @@ -0,0 +1,94 @@ +package pluginproxy + +import ( + "context" + "testing" + "time" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + . 
"github.com/smartystreets/goconvey/convey" + "golang.org/x/oauth2" + "golang.org/x/oauth2/jwt" +) + +func TestAccessToken(t *testing.T) { + Convey("Plugin with JWT token auth route", t, func() { + pluginRoute := &plugins.AppPluginRoute{ + Path: "pathwithjwttoken1", + Url: "https://api.jwt.io/some/path", + Method: "GET", + JwtTokenAuth: &plugins.JwtTokenAuth{ + Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token", + Scopes: []string{ + "https://www.testapi.com/auth/monitoring.read", + "https://www.testapi.com/auth/cloudplatformprojects.readonly", + }, + Params: map[string]string{ + "token_uri": "{{.JsonData.tokenUri}}", + "client_email": "{{.JsonData.clientEmail}}", + "private_key": "{{.SecureJsonData.privateKey}}", + }, + }, + } + + templateData := templateData{ + JsonData: map[string]interface{}{ + "clientEmail": "test@test.com", + "tokenUri": "login.url.com/token", + }, + SecureJsonData: map[string]string{ + "privateKey": "testkey", + }, + } + + ds := &models.DataSource{Id: 1, Version: 2} + + Convey("should fetch token using jwt private key", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{AccessToken: "abc"}, nil + } + provider := newAccessTokenProvider(ds, pluginRoute) + token, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + + So(token, ShouldEqual, "abc") + }) + + Convey("should set jwt config values", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + So(conf.Email, ShouldEqual, "test@test.com") + So(conf.PrivateKey, ShouldResemble, []byte("testkey")) + So(len(conf.Scopes), ShouldEqual, 2) + So(conf.Scopes[0], ShouldEqual, "https://www.testapi.com/auth/monitoring.read") + So(conf.Scopes[1], ShouldEqual, "https://www.testapi.com/auth/cloudplatformprojects.readonly") + So(conf.TokenURL, ShouldEqual, "login.url.com/token") + + return &oauth2.Token{AccessToken: "abc"}, nil + } + + 
provider := newAccessTokenProvider(ds, pluginRoute) + _, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + }) + + Convey("should use cached token on second call", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{ + AccessToken: "abc", + Expiry: time.Now().Add(1 * time.Minute)}, nil + } + provider := newAccessTokenProvider(ds, pluginRoute) + token1, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + So(token1, ShouldEqual, "abc") + + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{AccessToken: "error: cache not used"}, nil + } + token2, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + So(token2, ShouldEqual, "abc") + }) + }) +} diff --git a/pkg/api/pluginproxy/ds_auth_provider.go b/pkg/api/pluginproxy/ds_auth_provider.go new file mode 100644 index 00000000000..c68da839d13 --- /dev/null +++ b/pkg/api/pluginproxy/ds_auth_provider.go @@ -0,0 +1,93 @@ +package pluginproxy + +import ( + "bytes" + "context" + "fmt" + "net/http" + "net/url" + "strings" + "text/template" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/util" +) + +//ApplyRoute should use the plugin route data to set auth headers and custom headers +func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *m.DataSource) { + proxyPath = strings.TrimPrefix(proxyPath, route.Path) + + data := templateData{ + JsonData: ds.JsonData.Interface().(map[string]interface{}), + SecureJsonData: ds.SecureJsonData.Decrypt(), + } + + interpolatedURL, err := interpolateString(route.Url, data) + if err != nil { + logger.Error("Error interpolating proxy url", "error", err) + return + } + + routeURL, err := url.Parse(interpolatedURL) + if err 
!= nil { + logger.Error("Error parsing plugin route url", "error", err) + return + } + + req.URL.Scheme = routeURL.Scheme + req.URL.Host = routeURL.Host + req.Host = routeURL.Host + req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxyPath) + + if err := addHeaders(&req.Header, route, data); err != nil { + logger.Error("Failed to render plugin headers", "error", err) + } + + tokenProvider := newAccessTokenProvider(ds, route) + + if route.TokenAuth != nil { + if token, err := tokenProvider.getAccessToken(data); err != nil { + logger.Error("Failed to get access token", "error", err) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) + } + } + + if route.JwtTokenAuth != nil { + if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil { + logger.Error("Failed to get access token", "error", err) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) + } + } + logger.Info("Requesting", "url", req.URL.String()) + +} + +func interpolateString(text string, data templateData) (string, error) { + t, err := template.New("content").Parse(text) + if err != nil { + return "", fmt.Errorf("could not parse template %s", text) + } + + var contentBuf bytes.Buffer + err = t.Execute(&contentBuf, data) + if err != nil { + return "", fmt.Errorf("failed to execute template %s", text) + } + + return contentBuf.String(), nil +} + +func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { + for _, header := range route.Headers { + interpolated, err := interpolateString(header.Content, data) + if err != nil { + return err + } + reqHeaders.Add(header.Name, interpolated) + } + + return nil +} diff --git a/pkg/api/pluginproxy/ds_auth_provider_test.go b/pkg/api/pluginproxy/ds_auth_provider_test.go new file mode 100644 index 00000000000..9bd98a339e5 --- /dev/null +++ b/pkg/api/pluginproxy/ds_auth_provider_test.go @@ -0,0 +1,21 @@ +package pluginproxy + +import ( + "testing" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestDsAuthProvider(t *testing.T) { + Convey("When interpolating string", t, func() { + data := templateData{ + SecureJsonData: map[string]string{ + "Test": "0asd+asd", + }, + } + + interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data) + So(err, ShouldBeNil) + So(interpolated, ShouldEqual, "0asd+asd") + }) +} diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index fb2cab9b9b1..0c000058e4b 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -2,7 +2,6 @@ package pluginproxy import ( "bytes" - "encoding/json" "errors" "fmt" "io/ioutil" @@ -12,7 +11,6 @@ import ( "net/url" "strconv" "strings" - "text/template" "time" "github.com/opentracing/opentracing-go" @@ -25,17 +23,10 @@ import ( ) var ( - logger = log.New("data-proxy-log") - tokenCache = map[string]*jwtToken{} - client = newHTTPClient() + logger = log.New("data-proxy-log") + client = newHTTPClient() ) -type jwtToken struct { - ExpiresOn time.Time `json:"-"` - ExpiresOnString string `json:"expires_on"` - AccessToken string `json:"access_token"` -} - type DataSourceProxy struct { ds *m.DataSource ctx *m.ReqContext @@ -162,7 +153,6 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } else { req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath) } - if proxy.ds.BasicAuth { req.Header.Del("Authorization") req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword)) @@ -219,7 +209,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } if proxy.route != nil { - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) } } } @@ -311,120 +301,3 @@ func checkWhiteList(c *m.ReqContext, host string) bool { return true } - -func (proxy *DataSourceProxy) applyRoute(req *http.Request) { - proxy.proxyPath = 
strings.TrimPrefix(proxy.proxyPath, proxy.route.Path) - - data := templateData{ - JsonData: proxy.ds.JsonData.Interface().(map[string]interface{}), - SecureJsonData: proxy.ds.SecureJsonData.Decrypt(), - } - - interpolatedURL, err := interpolateString(proxy.route.Url, data) - if err != nil { - logger.Error("Error interpolating proxy url", "error", err) - return - } - - routeURL, err := url.Parse(interpolatedURL) - if err != nil { - logger.Error("Error parsing plugin route url", "error", err) - return - } - - req.URL.Scheme = routeURL.Scheme - req.URL.Host = routeURL.Host - req.Host = routeURL.Host - req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxy.proxyPath) - - if err := addHeaders(&req.Header, proxy.route, data); err != nil { - logger.Error("Failed to render plugin headers", "error", err) - } - - if proxy.route.TokenAuth != nil { - if token, err := proxy.getAccessToken(data); err != nil { - logger.Error("Failed to get access token", "error", err) - } else { - req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) - } - } - - logger.Info("Requesting", "url", req.URL.String()) -} - -func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) { - if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found { - if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { - logger.Info("Using token from cache") - return cachedToken.AccessToken, nil - } - } - - urlInterpolated, err := interpolateString(proxy.route.TokenAuth.Url, data) - if err != nil { - return "", err - } - - params := make(url.Values) - for key, value := range proxy.route.TokenAuth.Params { - interpolatedParam, err := interpolateString(value, data) - if err != nil { - return "", err - } - params.Add(key, interpolatedParam) - } - - getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode())) - getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") - 
getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode()))) - - resp, err := client.Do(getTokenReq) - if err != nil { - return "", err - } - - defer resp.Body.Close() - - var token jwtToken - if err := json.NewDecoder(resp.Body).Decode(&token); err != nil { - return "", err - } - - expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) - token.ExpiresOn = time.Unix(expiresOnEpoch, 0) - tokenCache[proxy.getAccessTokenCacheKey()] = &token - - logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) - return token.AccessToken, nil -} - -func (proxy *DataSourceProxy) getAccessTokenCacheKey() string { - return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method) -} - -func interpolateString(text string, data templateData) (string, error) { - t, err := template.New("content").Parse(text) - if err != nil { - return "", fmt.Errorf("could not parse template %s", text) - } - - var contentBuf bytes.Buffer - err = t.Execute(&contentBuf, data) - if err != nil { - return "", fmt.Errorf("failed to execute template %s", text) - } - - return contentBuf.String(), nil -} - -func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { - for _, header := range route.Headers { - interpolated, err := interpolateString(header.Content, data) - if err != nil { - return err - } - reqHeaders.Add(header.Name, interpolated) - } - - return nil -} diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go index e6d05872787..7dcd187c368 100644 --- a/pkg/api/pluginproxy/ds_proxy_test.go +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -83,7 +83,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When matching route path", func() { proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method") proxy.route = plugin.Routes[0] - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) Convey("should add headers and update url", 
func() { So(req.URL.String(), ShouldEqual, "https://www.google.com/some/method") @@ -94,7 +94,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When matching route path and has dynamic url", func() { proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method") proxy.route = plugin.Routes[3] - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) Convey("should add headers and interpolate the url", func() { So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method") @@ -188,7 +188,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient(json) proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") proxy1.route = plugin.Routes[0] - proxy1.applyRoute(req) + ApplyRoute(proxy1.ctx.Req.Context(), req, proxy1.proxyPath, proxy1.route, proxy1.ds) authorizationHeaderCall1 = req.Header.Get("Authorization") So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") @@ -202,7 +202,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient(json2) proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2") proxy2.route = plugin.Routes[1] - proxy2.applyRoute(req) + ApplyRoute(proxy2.ctx.Req.Context(), req, proxy2.proxyPath, proxy2.route, proxy2.ds) authorizationHeaderCall2 = req.Header.Get("Authorization") @@ -217,7 +217,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient([]byte{}) proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") proxy3.route = plugin.Routes[0] - proxy3.applyRoute(req) + ApplyRoute(proxy3.ctx.Req.Context(), req, proxy3.proxyPath, proxy3.route, proxy3.ds) authorizationHeaderCall3 := req.Header.Get("Authorization") So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") @@ -331,18 +331,6 @@ func TestDSRouteRule(t *testing.T) { }) }) - Convey("When interpolating string", func() { - data := templateData{ - SecureJsonData: map[string]string{ - "Test": "0asd+asd", - }, - } - - interpolated, err := 
interpolateString("{{.SecureJsonData.Test}}", data) - So(err, ShouldBeNil) - So(interpolated, ShouldEqual, "0asd+asd") - }) - Convey("When proxying a data source with custom headers specified", func() { plugin := &plugins.DataSourcePlugin{} diff --git a/pkg/api/render.go b/pkg/api/render.go index b8ef6cc5cb6..cf672af9bea 100644 --- a/pkg/api/render.go +++ b/pkg/api/render.go @@ -41,15 +41,16 @@ func (hs *HTTPServer) RenderToPng(c *m.ReqContext) { } result, err := hs.RenderService.Render(c.Req.Context(), rendering.Opts{ - Width: width, - Height: height, - Timeout: time.Duration(timeout) * time.Second, - OrgId: c.OrgId, - UserId: c.UserId, - OrgRole: c.OrgRole, - Path: c.Params("*") + queryParams, - Timezone: queryReader.Get("tz", ""), - Encoding: queryReader.Get("encoding", ""), + Width: width, + Height: height, + Timeout: time.Duration(timeout) * time.Second, + OrgId: c.OrgId, + UserId: c.UserId, + OrgRole: c.OrgRole, + Path: c.Params("*") + queryParams, + Timezone: queryReader.Get("tz", ""), + Encoding: queryReader.Get("encoding", ""), + ConcurrentLimit: 30, }) if err != nil && err == rendering.ErrTimeout { diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index 5e69559b9fa..902fd415977 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -6,6 +6,7 @@ import ( "github.com/codegangsta/cli" "github.com/fatih/color" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" @@ -24,6 +25,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli engine := &sqlstore.SqlStore{} engine.Cfg = cfg + engine.Bus = bus.GetBus() engine.Init() if err := command(cmd); err != nil { diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index 5d4969e06af..f88bb9bbfff 100644 --- 
a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -112,7 +112,7 @@ func SelectVersion(plugin m.Plugin, version string) (m.Version, error) { } } - return m.Version{}, errors.New("Could not find the version your looking for") + return m.Version{}, errors.New("Could not find the version you're looking for") } func RemoveGitBuildFromName(pluginName, filename string) string { diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index f1e298671d7..84325bae808 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -29,6 +29,7 @@ import ( _ "github.com/grafana/grafana/pkg/tsdb/opentsdb" _ "github.com/grafana/grafana/pkg/tsdb/postgres" _ "github.com/grafana/grafana/pkg/tsdb/prometheus" + _ "github.com/grafana/grafana/pkg/tsdb/stackdriver" _ "github.com/grafana/grafana/pkg/tsdb/testdata" ) @@ -103,7 +104,7 @@ func listenToSystemSignals(server *GrafanaServerImpl) { for { select { - case _ = <-sighupChan: + case <-sighupChan: log.Reload() case sig := <-signalChan: server.Shutdown(fmt.Sprintf("System signal: %s", sig)) diff --git a/pkg/components/imguploader/azureblobuploader.go b/pkg/components/imguploader/azureblobuploader.go index 3c0ac5b8884..b37763931c8 100644 --- a/pkg/components/imguploader/azureblobuploader.go +++ b/pkg/components/imguploader/azureblobuploader.go @@ -52,7 +52,7 @@ func (az *AzureBlobUploader) Upload(ctx context.Context, imageDiskPath string) ( } randomFileName := util.GetRandomString(30) + ".png" // upload image - az.log.Debug("Uploading image to azure_blob", "conatiner_name", az.container_name, "blob_name", randomFileName) + az.log.Debug("Uploading image to azure_blob", "container_name", az.container_name, "blob_name", randomFileName) resp, err := blob.FileUpload(az.container_name, randomFileName, file) if err != nil { return "", err @@ -127,8 +127,6 @@ type xmlError struct { const ms_date_layout = "Mon, 02 Jan 2006 15:04:05 GMT" const version 
= "2017-04-17" -var client = &http.Client{} - type StorageClient struct { Auth *Auth Transport http.RoundTripper @@ -274,10 +272,10 @@ func (a *Auth) canonicalizedHeaders(req *http.Request) string { } } - splitted := strings.Split(buffer.String(), "\n") - sort.Strings(splitted) + split := strings.Split(buffer.String(), "\n") + sort.Strings(split) - return strings.Join(splitted, "\n") + return strings.Join(split, "\n") } /* @@ -313,8 +311,8 @@ func (a *Auth) canonicalizedResource(req *http.Request) string { buffer.WriteString(fmt.Sprintf("\n%s:%s", key, strings.Join(values, ","))) } - splitted := strings.Split(buffer.String(), "\n") - sort.Strings(splitted) + split := strings.Split(buffer.String(), "\n") + sort.Strings(split) - return strings.Join(splitted, "\n") + return strings.Join(split, "\n") } diff --git a/pkg/components/imguploader/s3uploader.go b/pkg/components/imguploader/s3uploader.go index a1e4aed0f47..9c8af21e39e 100644 --- a/pkg/components/imguploader/s3uploader.go +++ b/pkg/components/imguploader/s3uploader.go @@ -2,12 +2,15 @@ package imguploader import ( "context" + "fmt" "os" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds" + "github.com/aws/aws-sdk-go/aws/credentials/endpointcreds" + "github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/ec2metadata" "github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/session" @@ -50,7 +53,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, SecretAccessKey: u.secretKey, }}, &credentials.EnvProvider{}, - &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}, + remoteCredProvider(sess), }) cfg := &aws.Config{ Region: aws.String(u.region), @@ -85,3 +88,27 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, } return image_url, nil } + +func remoteCredProvider(sess *session.Session) 
credentials.Provider { + ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") + + if len(ecsCredURI) > 0 { + return ecsCredProvider(sess, ecsCredURI) + } + return ec2RoleProvider(sess) +} + +func ecsCredProvider(sess *session.Session, uri string) credentials.Provider { + const host = `169.254.170.2` + + d := defaults.Get() + return endpointcreds.NewProviderClient( + *d.Config, + d.Handlers, + fmt.Sprintf("http://%s%s", host, uri), + func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute }) +} + +func ec2RoleProvider(sess *session.Session) credentials.Provider { + return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute} +} diff --git a/pkg/components/null/float.go b/pkg/components/null/float.go index 4f783f2c584..9082c831084 100644 --- a/pkg/components/null/float.go +++ b/pkg/components/null/float.go @@ -8,6 +8,10 @@ import ( "strconv" ) +const ( + nullString = "null" +) + // Float is a nullable float64. // It does not consider zero values to be null. // It will decode to null, not zero, if null. @@ -68,7 +72,7 @@ func (f *Float) UnmarshalJSON(data []byte) error { // It will return an error if the input is not an integer, blank, or "null". func (f *Float) UnmarshalText(text []byte) error { str := string(text) - if str == "" || str == "null" { + if str == "" || str == nullString { f.Valid = false return nil } @@ -82,7 +86,7 @@ func (f *Float) UnmarshalText(text []byte) error { // It will encode null if this Float is null. func (f Float) MarshalJSON() ([]byte, error) { if !f.Valid { - return []byte("null"), nil + return []byte(nullString), nil } return []byte(strconv.FormatFloat(f.Float64, 'f', -1, 64)), nil } @@ -100,7 +104,7 @@ func (f Float) MarshalText() ([]byte, error) { // It will encode a blank string if this Float is null. 
func (f Float) String() string { if !f.Valid { - return "null" + return nullString } return fmt.Sprintf("%1.3f", f.Float64) @@ -109,7 +113,7 @@ func (f Float) String() string { // FullString returns float as string in full precision func (f Float) FullString() string { if !f.Valid { - return "null" + return nullString } return fmt.Sprintf("%f", f.Float64) diff --git a/pkg/components/simplejson/simplejson.go b/pkg/components/simplejson/simplejson.go index 85e2f955943..35e305eb414 100644 --- a/pkg/components/simplejson/simplejson.go +++ b/pkg/components/simplejson/simplejson.go @@ -256,7 +256,7 @@ func (j *Json) StringArray() ([]string, error) { // MustArray guarantees the return of a `[]interface{}` (with optional default) // -// useful when you want to interate over array values in a succinct manner: +// useful when you want to iterate over array values in a succinct manner: // for i, v := range js.Get("results").MustArray() { // fmt.Println(i, v) // } @@ -281,7 +281,7 @@ func (j *Json) MustArray(args ...[]interface{}) []interface{} { // MustMap guarantees the return of a `map[string]interface{}` (with optional default) // -// useful when you want to interate over map values in a succinct manner: +// useful when you want to iterate over map values in a succinct manner: // for k, v := range js.Get("dictionary").MustMap() { // fmt.Println(k, v) // } @@ -329,7 +329,7 @@ func (j *Json) MustString(args ...string) string { // MustStringArray guarantees the return of a `[]string` (with optional default) // -// useful when you want to interate over array values in a succinct manner: +// useful when you want to iterate over array values in a succinct manner: // for i, s := range js.Get("results").MustStringArray() { // fmt.Println(i, s) // } diff --git a/pkg/login/ldap_settings.go b/pkg/login/ldap_settings.go index 7ebfbc79ba8..40791a509db 100644 --- a/pkg/login/ldap_settings.go +++ b/pkg/login/ldap_settings.go @@ -48,7 +48,7 @@ type LdapAttributeMap struct { type 
LdapGroupToOrgRole struct { GroupDN string `toml:"group_dn"` OrgId int64 `toml:"org_id"` - IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatability) + IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatibility) OrgRole m.RoleType `toml:"org_role"` } diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 87c23a7b49a..1830b3eb161 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -435,11 +435,6 @@ func (sc *scenarioContext) withValidApiKey() *scenarioContext { return sc } -func (sc *scenarioContext) withInvalidApiKey() *scenarioContext { - sc.apiKey = "nvalidhhhhds" - return sc -} - func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext { sc.authHeader = authHeader return sc diff --git a/pkg/models/alert.go b/pkg/models/alert.go index fba2aa63df9..ba1fc0779ba 100644 --- a/pkg/models/alert.go +++ b/pkg/models/alert.go @@ -75,7 +75,7 @@ type Alert struct { EvalData *simplejson.Json NewStateDate time.Time - StateChanges int + StateChanges int64 Created time.Time Updated time.Time @@ -156,7 +156,7 @@ type SetAlertStateCommand struct { Error string EvalData *simplejson.Json - Timestamp time.Time + Result Alert } //Queries diff --git a/pkg/models/alert_notifications.go b/pkg/models/alert_notifications.go index 42d33d5ed22..2128b469fa4 100644 --- a/pkg/models/alert_notifications.go +++ b/pkg/models/alert_notifications.go @@ -8,8 +8,18 @@ import ( ) var ( - ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified") - ErrJournalingNotFound = errors.New("alert notification journaling not found") + ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified") + ErrAlertNotificationStateNotFound = errors.New("alert notification state not found") + ErrAlertNotificationStateVersionConflict = 
errors.New("alert notification state update version conflict") + ErrAlertNotificationStateAlreadyExist = errors.New("alert notification state already exists.") +) + +type AlertNotificationStateType string + +var ( + AlertNotificationStatePending = AlertNotificationStateType("pending") + AlertNotificationStateCompleted = AlertNotificationStateType("completed") + AlertNotificationStateUnknown = AlertNotificationStateType("unknown") ) type AlertNotification struct { @@ -76,33 +86,34 @@ type GetAllAlertNotificationsQuery struct { Result []*AlertNotification } -type AlertNotificationJournal struct { - Id int64 - OrgId int64 - AlertId int64 - NotifierId int64 - SentAt int64 - Success bool +type AlertNotificationState struct { + Id int64 + OrgId int64 + AlertId int64 + NotifierId int64 + State AlertNotificationStateType + Version int64 + UpdatedAt int64 + AlertRuleStateUpdatedVersion int64 } -type RecordNotificationJournalCommand struct { - OrgId int64 - AlertId int64 - NotifierId int64 - SentAt int64 - Success bool +type SetAlertNotificationStateToPendingCommand struct { + Id int64 + AlertRuleStateUpdatedVersion int64 + Version int64 + + ResultVersion int64 } -type GetLatestNotificationQuery struct { +type SetAlertNotificationStateToCompleteCommand struct { + Id int64 + Version int64 +} + +type GetOrCreateNotificationStateQuery struct { OrgId int64 AlertId int64 NotifierId int64 - Result *AlertNotificationJournal -} - -type CleanNotificationJournalCommand struct { - OrgId int64 - AlertId int64 - NotifierId int64 + Result *AlertNotificationState } diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index b7e3e3eaa17..c730622512f 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -22,6 +22,7 @@ const ( DS_MSSQL = "mssql" DS_ACCESS_DIRECT = "direct" DS_ACCESS_PROXY = "proxy" + DS_STACKDRIVER = "stackdriver" ) var ( @@ -59,23 +60,23 @@ type DataSource struct { } var knownDatasourcePlugins = map[string]bool{ - DS_ES: true, - DS_GRAPHITE: 
true, - DS_INFLUXDB: true, - DS_INFLUXDB_08: true, - DS_KAIROSDB: true, - DS_CLOUDWATCH: true, - DS_PROMETHEUS: true, - DS_OPENTSDB: true, - DS_POSTGRES: true, - DS_MYSQL: true, - DS_MSSQL: true, - "opennms": true, - "abhisant-druid-datasource": true, - "dalmatinerdb-datasource": true, - "gnocci": true, - "zabbix": true, - "alexanderzobnin-zabbix-datasource": true, + DS_ES: true, + DS_GRAPHITE: true, + DS_INFLUXDB: true, + DS_INFLUXDB_08: true, + DS_KAIROSDB: true, + DS_CLOUDWATCH: true, + DS_PROMETHEUS: true, + DS_OPENTSDB: true, + DS_POSTGRES: true, + DS_MYSQL: true, + DS_MSSQL: true, + DS_STACKDRIVER: true, + "opennms": true, + "abhisant-druid-datasource": true, + "dalmatinerdb-datasource": true, + "gnocci": true, + "zabbix": true, "newrelic-app": true, "grafana-datadog-datasource": true, "grafana-simple-json": true, @@ -88,6 +89,7 @@ var knownDatasourcePlugins = map[string]bool{ "ayoungprogrammer-finance-datasource": true, "monasca-datasource": true, "vertamedia-clickhouse-datasource": true, + "alexanderzobnin-zabbix-datasource": true, } func IsKnownDataSourcePlugin(dsType string) bool { diff --git a/pkg/plugins/app_plugin.go b/pkg/plugins/app_plugin.go index b070ba592f0..922b2444b7b 100644 --- a/pkg/plugins/app_plugin.go +++ b/pkg/plugins/app_plugin.go @@ -23,12 +23,13 @@ type AppPlugin struct { } type AppPluginRoute struct { - Path string `json:"path"` - Method string `json:"method"` - ReqRole models.RoleType `json:"reqRole"` - Url string `json:"url"` - Headers []AppPluginRouteHeader `json:"headers"` - TokenAuth *JwtTokenAuth `json:"tokenAuth"` + Path string `json:"path"` + Method string `json:"method"` + ReqRole models.RoleType `json:"reqRole"` + Url string `json:"url"` + Headers []AppPluginRouteHeader `json:"headers"` + TokenAuth *JwtTokenAuth `json:"tokenAuth"` + JwtTokenAuth *JwtTokenAuth `json:"jwtTokenAuth"` } type AppPluginRouteHeader struct { @@ -36,8 +37,11 @@ type AppPluginRouteHeader struct { Content string `json:"content"` } +// JwtTokenAuth 
struct is both for normal Token Auth and JWT Token Auth with +// an uploaded JWT file. type JwtTokenAuth struct { Url string `json:"url"` + Scopes []string `json:"scopes"` Params map[string]string `json:"params"` } diff --git a/pkg/plugins/dashboard_importer_test.go b/pkg/plugins/dashboard_importer_test.go index 6f31b49f99d..ca8dfcd515c 100644 --- a/pkg/plugins/dashboard_importer_test.go +++ b/pkg/plugins/dashboard_importer_test.go @@ -35,7 +35,7 @@ func TestDashboardImport(t *testing.T) { So(cmd.Result, ShouldNotBeNil) resultStr, _ := mock.SavedDashboards[0].Dashboard.Data.EncodePretty() - expectedBytes, _ := ioutil.ReadFile("../../tests/test-app/dashboards/connections_result.json") + expectedBytes, _ := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json") expectedJson, _ := simplejson.NewJson(expectedBytes) expectedStr, _ := expectedJson.EncodePretty() @@ -89,7 +89,7 @@ func pluginScenario(desc string, t *testing.T, fn func()) { Convey("Given a plugin", t, func() { setting.Raw = ini.Empty() sec, _ := setting.Raw.NewSection("plugin.test-app") - sec.NewKey("path", "../../tests/test-app") + sec.NewKey("path", "testdata/test-app") pm := &PluginManager{} err := pm.Init() diff --git a/pkg/plugins/dashboards_test.go b/pkg/plugins/dashboards_test.go index c422a1431c0..6fc6ace0e00 100644 --- a/pkg/plugins/dashboards_test.go +++ b/pkg/plugins/dashboards_test.go @@ -16,7 +16,7 @@ func TestPluginDashboards(t *testing.T) { Convey("When asking plugin dashboard info", t, func() { setting.Raw = ini.Empty() sec, _ := setting.Raw.NewSection("plugin.test-app") - sec.NewKey("path", "../../tests/test-app") + sec.NewKey("path", "testdata/test-app") pm := &PluginManager{} err := pm.Init() diff --git a/pkg/plugins/dashboards_updater.go b/pkg/plugins/dashboards_updater.go index ebe11ed32d4..616d4541bec 100644 --- a/pkg/plugins/dashboards_updater.go +++ b/pkg/plugins/dashboards_updater.go @@ -48,11 +48,7 @@ func autoUpdateAppDashboard(pluginDashInfo 
*PluginDashboardInfoDTO, orgId int64) Path: pluginDashInfo.Path, } - if err := bus.Dispatch(&updateCmd); err != nil { - return err - } - - return nil + return bus.Dispatch(&updateCmd) } func syncPluginDashboards(pluginDef *PluginBase, orgId int64) { diff --git a/pkg/plugins/plugins_test.go b/pkg/plugins/plugins_test.go index fa68ae4389d..d16e6abb4c7 100644 --- a/pkg/plugins/plugins_test.go +++ b/pkg/plugins/plugins_test.go @@ -30,7 +30,7 @@ func TestPluginScans(t *testing.T) { Convey("When reading app plugin definition", t, func() { setting.Raw = ini.Empty() sec, _ := setting.Raw.NewSection("plugin.nginx-app") - sec.NewKey("path", "../../tests/test-app") + sec.NewKey("path", "testdata/test-app") pm := &PluginManager{} err := pm.Init() diff --git a/tests/datasource-test/module.js b/pkg/plugins/testdata/datasource-test/module.js similarity index 100% rename from tests/datasource-test/module.js rename to pkg/plugins/testdata/datasource-test/module.js diff --git a/tests/datasource-test/plugin.json b/pkg/plugins/testdata/datasource-test/plugin.json similarity index 100% rename from tests/datasource-test/plugin.json rename to pkg/plugins/testdata/datasource-test/plugin.json diff --git a/tests/test-app/dashboards/connections.json b/pkg/plugins/testdata/test-app/dashboards/connections.json similarity index 100% rename from tests/test-app/dashboards/connections.json rename to pkg/plugins/testdata/test-app/dashboards/connections.json diff --git a/tests/test-app/dashboards/connections_result.json b/pkg/plugins/testdata/test-app/dashboards/connections_result.json similarity index 100% rename from tests/test-app/dashboards/connections_result.json rename to pkg/plugins/testdata/test-app/dashboards/connections_result.json diff --git a/tests/test-app/dashboards/memory.json b/pkg/plugins/testdata/test-app/dashboards/memory.json similarity index 100% rename from tests/test-app/dashboards/memory.json rename to pkg/plugins/testdata/test-app/dashboards/memory.json diff --git 
a/tests/test-app/plugin.json b/pkg/plugins/testdata/test-app/plugin.json similarity index 100% rename from tests/test-app/plugin.json rename to pkg/plugins/testdata/test-app/plugin.json diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go index e1c1bfacb2e..229092e217b 100644 --- a/pkg/services/alerting/extractor.go +++ b/pkg/services/alerting/extractor.go @@ -82,12 +82,13 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json, if collapsed && collapsedJSON.MustBool() { // extract alerts from sub panels for collapsed panels - als, err := e.getAlertFromPanels(panel, validateAlertFunc) + alertSlice, err := e.getAlertFromPanels(panel, + validateAlertFunc) if err != nil { return nil, err } - alerts = append(alerts, als...) + alerts = append(alerts, alertSlice...) continue } diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go index 46f8b3c769c..96294f0624f 100644 --- a/pkg/services/alerting/interfaces.go +++ b/pkg/services/alerting/interfaces.go @@ -3,6 +3,8 @@ package alerting import ( "context" "time" + + "github.com/grafana/grafana/pkg/models" ) type EvalHandler interface { @@ -20,7 +22,7 @@ type Notifier interface { NeedsImage() bool // ShouldNotify checks this evaluation should send an alert notification - ShouldNotify(ctx context.Context, evalContext *EvalContext) bool + ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool GetNotifierId() int64 GetIsDefault() bool @@ -28,11 +30,16 @@ type Notifier interface { GetFrequency() time.Duration } -type NotifierSlice []Notifier +type notifierState struct { + notifier Notifier + state *models.AlertNotificationState +} -func (notifiers NotifierSlice) ShouldUploadImage() bool { - for _, notifier := range notifiers { - if notifier.NeedsImage() { +type notifierStateSlice []*notifierState + +func (notifiers notifierStateSlice) ShouldUploadImage() bool { + for _, ns := range 
notifiers { + if ns.notifier.NeedsImage() { return true } } diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index 7fbd956f4f9..9ce50eadd6b 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -1,16 +1,15 @@ package alerting import ( - "context" "errors" "fmt" - "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/imguploader" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/services/rendering" + "github.com/grafana/grafana/pkg/setting" m "github.com/grafana/grafana/pkg/models" ) @@ -40,61 +39,78 @@ type notificationService struct { } func (n *notificationService) SendIfNeeded(context *EvalContext) error { - notifiers, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) + notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) if err != nil { return err } - if len(notifiers) == 0 { + if len(notifierStates) == 0 { return nil } - if notifiers.ShouldUploadImage() { + if notifierStates.ShouldUploadImage() { if err = n.uploadImage(context); err != nil { n.log.Error("Failed to upload alert panel image.", "error", err) } } - return n.sendNotifications(context, notifiers) + return n.sendNotifications(context, notifierStates) } -func (n *notificationService) sendNotifications(evalContext *EvalContext, notifiers []Notifier) error { - for _, notifier := range notifiers { - not := notifier +func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error { + notifier := notifierState.notifier - err := bus.InTransaction(evalContext.Ctx, func(ctx context.Context) error { - n.log.Debug("trying to send notification", "id", not.GetNotifierId()) + n.log.Debug("Sending notification", "type", notifier.GetType(), "id", notifier.GetNotifierId(), "isDefault", notifier.GetIsDefault()) + 
metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc() - // Verify that we can send the notification again - // but this time within the same transaction. - if !evalContext.IsTestRun && !not.ShouldNotify(context.Background(), evalContext) { - return nil - } + err := notifier.Notify(evalContext) - n.log.Debug("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault()) - metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc() + if err != nil { + n.log.Error("failed to send notification", "id", notifier.GetNotifierId(), "error", err) + } - //send notification - success := not.Notify(evalContext) == nil + if evalContext.IsTestRun { + return nil + } - if evalContext.IsTestRun { - return nil - } + cmd := &m.SetAlertNotificationStateToCompleteCommand{ + Id: notifierState.state.Id, + Version: notifierState.state.Version, + } - //write result to db. - cmd := &m.RecordNotificationJournalCommand{ - OrgId: evalContext.Rule.OrgId, - AlertId: evalContext.Rule.Id, - NotifierId: not.GetNotifierId(), - SentAt: time.Now().Unix(), - Success: success, - } + return bus.DispatchCtx(evalContext.Ctx, cmd) +} - return bus.DispatchCtx(ctx, cmd) - }) +func (n *notificationService) sendNotification(evalContext *EvalContext, notifierState *notifierState) error { + if !evalContext.IsTestRun { + setPendingCmd := &m.SetAlertNotificationStateToPendingCommand{ + Id: notifierState.state.Id, + Version: notifierState.state.Version, + AlertRuleStateUpdatedVersion: evalContext.Rule.StateChanges, + } + + err := bus.DispatchCtx(evalContext.Ctx, setPendingCmd) + if err == m.ErrAlertNotificationStateVersionConflict { + return nil + } if err != nil { - n.log.Error("failed to send notification", "id", not.GetNotifierId()) + return err + } + + // We need to update state version to be able to log + // unexpected version conflicts when marking notifications as ok + notifierState.state.Version = setPendingCmd.ResultVersion + 
} + + return n.sendAndMarkAsComplete(evalContext, notifierState) +} + +func (n *notificationService) sendNotifications(evalContext *EvalContext, notifierStates notifierStateSlice) error { + for _, notifierState := range notifierStates { + err := n.sendNotification(evalContext, notifierState) + if err != nil { + n.log.Error("failed to send notification", "id", notifierState.notifier.GetNotifierId(), "error", err) } } @@ -108,11 +124,12 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { } renderOpts := rendering.Opts{ - Width: 1000, - Height: 500, - Timeout: alertTimeout / 2, - OrgId: context.Rule.OrgId, - OrgRole: m.ROLE_ADMIN, + Width: 1000, + Height: 500, + Timeout: alertTimeout / 2, + OrgId: context.Rule.OrgId, + OrgRole: m.ROLE_ADMIN, + ConcurrentLimit: setting.AlertingRenderLimit, } ref, err := context.GetDashboardUID() @@ -140,22 +157,38 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return nil } -func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (NotifierSlice, error) { +func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (notifierStateSlice, error) { query := &m.GetAlertNotificationsToSendQuery{OrgId: orgId, Ids: notificationIds} if err := bus.Dispatch(query); err != nil { return nil, err } - var result []Notifier + var result notifierStateSlice for _, notification := range query.Result { not, err := n.createNotifierFor(notification) if err != nil { - return nil, err + n.log.Error("Could not create notifier", "notifier", notification.Id, "error", err) + continue } - if not.ShouldNotify(evalContext.Ctx, evalContext) { - result = append(result, not) + query := &m.GetOrCreateNotificationStateQuery{ + NotifierId: notification.Id, + AlertId: evalContext.Rule.Id, + OrgId: evalContext.Rule.OrgId, + } + + err = bus.DispatchCtx(evalContext.Ctx, query) + if err != nil { + 
n.log.Error("Could not get notification state.", "notifier", notification.Id, "error", err) + continue + } + + if not.ShouldNotify(evalContext.Ctx, evalContext, query.Result) { + result = append(result, ¬ifierState{ + notifier: not, + state: query.Result, + }) } } diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go index 9826dd1dffb..2caa4d5ab58 100644 --- a/pkg/services/alerting/notifiers/alertmanager.go +++ b/pkg/services/alerting/notifiers/alertmanager.go @@ -46,7 +46,7 @@ type AlertmanagerNotifier struct { log log.Logger } -func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext) bool { +func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *m.AlertNotificationState) bool { this.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) // Do not notify when we become OK for the first time. 
diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index ca011356247..fbade2eccac 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -4,13 +4,16 @@ import ( "context" "time" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) +const ( + triggMetrString = "Triggered metrics:\n\n" +) + type NotifierBase struct { Name string Type string @@ -42,55 +45,47 @@ func NewNotifierBase(model *models.AlertNotification) NotifierBase { } } -func defaultShouldNotify(context *alerting.EvalContext, sendReminder bool, frequency time.Duration, lastNotify time.Time) bool { +// ShouldNotify checks this evaluation should send an alert notification +func (n *NotifierBase) ShouldNotify(ctx context.Context, context *alerting.EvalContext, notiferState *models.AlertNotificationState) bool { // Only notify on state change. 
- if context.PrevAlertState == context.Rule.State && !sendReminder { + if context.PrevAlertState == context.Rule.State && !n.SendReminder { return false } - // Do not notify if interval has not elapsed - if sendReminder && !lastNotify.IsZero() && lastNotify.Add(frequency).After(time.Now()) { - return false - } + if context.PrevAlertState == context.Rule.State && n.SendReminder { + // Do not notify if interval has not elapsed + lastNotify := time.Unix(notiferState.UpdatedAt, 0) + if notiferState.UpdatedAt != 0 && lastNotify.Add(n.Frequency).After(time.Now()) { + return false + } - // Do not notify if alert state if OK or pending even on repeated notify - if sendReminder && (context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending) { - return false + // Do not notify if alert state is OK or pending even on repeated notify + if context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending { + return false + } } // Do not notify when we become OK for the first time. 
- if (context.PrevAlertState == models.AlertStatePending) && (context.Rule.State == models.AlertStateOK) { + if context.PrevAlertState == models.AlertStatePending && context.Rule.State == models.AlertStateOK { return false } + // Do not notify when we OK -> Pending + if context.PrevAlertState == models.AlertStateOK && context.Rule.State == models.AlertStatePending { + return false + } + + // Do not notifu if state pending and it have been updated last minute + if notiferState.State == models.AlertNotificationStatePending { + lastUpdated := time.Unix(notiferState.UpdatedAt, 0) + if lastUpdated.Add(1 * time.Minute).After(time.Now()) { + return false + } + } + return true } -// ShouldNotify checks this evaluation should send an alert notification -func (n *NotifierBase) ShouldNotify(ctx context.Context, c *alerting.EvalContext) bool { - cmd := &models.GetLatestNotificationQuery{ - OrgId: c.Rule.OrgId, - AlertId: c.Rule.Id, - NotifierId: n.Id, - } - - err := bus.DispatchCtx(ctx, cmd) - if err == models.ErrJournalingNotFound { - return true - } - - if err != nil { - n.log.Error("Could not determine last time alert notifier fired", "Alert name", c.Rule.Name, "Error", err) - return false - } - - if !cmd.Result.Success { - return true - } - - return defaultShouldNotify(c, n.SendReminder, n.Frequency, time.Unix(cmd.Result.SentAt, 0)) -} - func (n *NotifierBase) GetType() string { return n.Type } diff --git a/pkg/services/alerting/notifiers/base_test.go b/pkg/services/alerting/notifiers/base_test.go index 57b82f32466..5e46d3ad72e 100644 --- a/pkg/services/alerting/notifiers/base_test.go +++ b/pkg/services/alerting/notifiers/base_test.go @@ -2,12 +2,9 @@ package notifiers import ( "context" - "errors" "testing" "time" - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" @@ -15,100 +12,144 @@ import ( ) func TestShouldSendAlertNotification(t 
*testing.T) { + tnow := time.Now() + tcs := []struct { name string prevState m.AlertStateType newState m.AlertStateType - expected bool sendReminder bool + frequency time.Duration + state *m.AlertNotificationState + + expect bool }{ { - name: "pending -> ok should not trigger an notification", - newState: m.AlertStatePending, - prevState: m.AlertStateOK, - expected: false, + name: "pending -> ok should not trigger an notification", + newState: m.AlertStateOK, + prevState: m.AlertStatePending, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, }, { - name: "ok -> alerting should trigger an notification", - newState: m.AlertStateOK, - prevState: m.AlertStateAlerting, - expected: true, + name: "ok -> alerting should trigger an notification", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: true, }, { - name: "ok -> pending should not trigger an notification", - newState: m.AlertStateOK, - prevState: m.AlertStatePending, - expected: false, + name: "ok -> pending should not trigger an notification", + newState: m.AlertStatePending, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, }, { name: "ok -> ok should not trigger an notification", newState: m.AlertStateOK, prevState: m.AlertStateOK, - expected: false, sendReminder: false, - }, - { - name: "ok -> alerting should not trigger an notification", - newState: m.AlertStateOK, - prevState: m.AlertStateAlerting, - expected: true, - sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: false, }, { name: "ok -> ok with reminder should not trigger an notification", newState: m.AlertStateOK, prevState: m.AlertStateOK, - expected: false, sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: false, + }, + { + name: "alerting -> ok should trigger an notification", + newState: m.AlertStateOK, + prevState: m.AlertStateAlerting, 
+ sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "alerting -> ok should trigger an notification when reminders enabled", + newState: m.AlertStateOK, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, + + expect: true, + }, + { + name: "alerting -> alerting with reminder and no state should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "alerting -> alerting with reminder and last notification sent 1 minute ago should not trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, + + expect: false, + }, + { + name: "alerting -> alerting with reminder and last notifciation sent 11 minutes ago should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-11 * time.Minute).Unix()}, + + expect: true, + }, + { + name: "OK -> alerting with notifciation state pending and updated 30 seconds ago should not trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-30 * time.Second).Unix()}, + + expect: false, + }, + { + name: "OK -> alerting with notifciation state pending and updated 2 minutes ago should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-2 * time.Minute).Unix()}, + + expect: true, }, } for _, tc := range tcs { evalContext := 
alerting.NewEvalContext(context.TODO(), &alerting.Rule{ - State: tc.newState, + State: tc.prevState, }) - evalContext.Rule.State = tc.prevState - if defaultShouldNotify(evalContext, true, 0, time.Now()) != tc.expected { - t.Errorf("failed %s. expected %+v to return %v", tc.name, tc, tc.expected) + evalContext.Rule.State = tc.newState + nb := &NotifierBase{SendReminder: tc.sendReminder, Frequency: tc.frequency} + + if nb.ShouldNotify(evalContext.Ctx, evalContext, tc.state) != tc.expect { + t.Errorf("failed test %s.\n expected \n%+v \nto return: %v", tc.name, tc, tc.expect) } } } -func TestShouldNotifyWhenNoJournalingIsFound(t *testing.T) { - Convey("base notifier", t, func() { - bus.ClearBusHandlers() - - notifier := NewNotifierBase(&m.AlertNotification{ - Id: 1, - Name: "name", - Type: "email", - Settings: simplejson.New(), - }) - evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{}) - - Convey("should notify if no journaling is found", func() { - bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error { - return m.ErrJournalingNotFound - }) - - if !notifier.ShouldNotify(context.Background(), evalContext) { - t.Errorf("should send notifications when ErrJournalingNotFound is returned") - } - }) - - Convey("should not notify query returns error", func() { - bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error { - return errors.New("some kind of error unknown error") - }) - - if notifier.ShouldNotify(context.Background(), evalContext) { - t.Errorf("should not send notifications when query returns error") - } - }) - }) -} - func TestBaseNotifier(t *testing.T) { Convey("default constructor for notifiers", t, func() { bJson := simplejson.New() diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go index d8d19fc5dae..a8a424c87a7 100644 --- a/pkg/services/alerting/notifiers/kafka.go +++ b/pkg/services/alerting/notifiers/kafka.go @@ -61,7 +61,7 @@ 
func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { state := evalContext.Rule.State - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 84148a0d99c..629968b5102 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -95,7 +95,7 @@ func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) err return err } - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index bf85466388f..9f6ce3c2dc8 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -76,7 +76,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { if evalContext.Rule.State == m.AlertStateOK { eventType = "resolve" } - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 7beb71e5c65..2dad11285b4 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -74,7 +74,7 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { } message := "" - if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok. 
+ if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok. message = evalContext.Rule.Message } diff --git a/pkg/services/alerting/notifiers/telegram_test.go b/pkg/services/alerting/notifiers/telegram_test.go index 98c8d884ad0..911323ae9d1 100644 --- a/pkg/services/alerting/notifiers/telegram_test.go +++ b/pkg/services/alerting/notifiers/telegram_test.go @@ -1,6 +1,7 @@ package notifiers import ( + "context" "testing" "github.com/grafana/grafana/pkg/components/simplejson" @@ -52,11 +53,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("generateCaption should generate a message with all pertinent details", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/abcdef", "") So(len(caption), ShouldBeLessThanOrEqualTo, 200) @@ -68,11 +70,12 @@ func TestTelegramNotifier(t *testing.T) { Convey("When generating a message", func() { Convey("URL should be skipped if it's too long", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/abcdefaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", @@ -85,11 +88,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("Message should be trimmed if it's too long", func() 
{ - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/foo", @@ -101,11 +105,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("Metrics should be skipped if they don't fit", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/foo", diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 363d06d1132..420ffeb9a55 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -67,6 +67,12 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } handler.log.Error("Failed to save state", "error", err) + } else { + + // StateChanges is used for de duping 
alert notifications + // when two servers are raising. This makes sure that the server + // with the last state change always sends a notification. + evalContext.Rule.StateChanges = cmd.Result.StateChanges } // save annotation @@ -88,19 +94,6 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } } - if evalContext.Rule.State == m.AlertStateOK && evalContext.PrevAlertState != m.AlertStateOK { - for _, notifierId := range evalContext.Rule.Notifications { - cmd := &m.CleanNotificationJournalCommand{ - AlertId: evalContext.Rule.Id, - NotifierId: notifierId, - OrgId: evalContext.Rule.OrgId, - } - if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - handler.log.Error("Failed to clean up old notification records", "notifier", notifierId, "alert", evalContext.Rule.Id, "Error", err) - } - } - } handler.notifier.SendIfNeeded(evalContext) - return nil } diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index 018d138dbe4..d13924c2a17 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -23,6 +23,8 @@ type Rule struct { State m.AlertStateType Conditions []Condition Notifications []int64 + + StateChanges int64 } type ValidationError struct { @@ -100,6 +102,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) { model.State = ruleDef.State model.NoDataState = m.NoDataOption(ruleDef.Settings.Get("noDataState").MustString("no_data")) model.ExecutionErrorState = m.ExecutionErrorOption(ruleDef.Settings.Get("executionErrorState").MustString("alerting")) + model.StateChanges = ruleDef.StateChanges for _, v := range ruleDef.Settings.Get("notifications").MustArray() { jsonModel := simplejson.NewFromAny(v) diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 8421360b5ed..8aa1b80aa22 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -39,7 +39,7 @@ func 
handleNotificationTestCommand(cmd *NotificationTestCommand) error { return err } - return notifier.sendNotifications(createTestEvalContext(cmd), []Notifier{notifiers}) + return notifier.sendNotifications(createTestEvalContext(cmd), notifierStateSlice{{notifier: notifiers}}) } func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { diff --git a/pkg/services/alerting/ticker.go b/pkg/services/alerting/ticker.go index 5ce19b1b232..8cee2653ee9 100644 --- a/pkg/services/alerting/ticker.go +++ b/pkg/services/alerting/ticker.go @@ -37,10 +37,6 @@ func NewTicker(last time.Time, initialOffset time.Duration, c clock.Clock) *Tick return t } -func (t *Ticker) updateOffset(offset time.Duration) { - t.newOffset <- offset -} - func (t *Ticker) run() { for { next := t.last.Add(time.Duration(1) * time.Second) diff --git a/pkg/services/notifications/notifications_test.go b/pkg/services/notifications/notifications_test.go index 504c10c22ec..d54b70e704f 100644 --- a/pkg/services/notifications/notifications_test.go +++ b/pkg/services/notifications/notifications_test.go @@ -9,12 +9,6 @@ import ( . 
"github.com/smartystreets/goconvey/convey" ) -type testTriggeredAlert struct { - ActualValue float64 - Name string - State string -} - func TestNotifications(t *testing.T) { Convey("Given the notifications service", t, func() { diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index 7508550838f..bfef06b558e 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -83,7 +83,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { } if dashboards[i].UpdateIntervalSeconds == 0 { - dashboards[i].UpdateIntervalSeconds = 3 + dashboards[i].UpdateIntervalSeconds = 10 } } diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go index df0d2ae038e..d386e42349d 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -70,7 +70,7 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { So(len(ds.Options), ShouldEqual, 1) So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds.DisableDeletion, ShouldBeTrue) - So(ds.UpdateIntervalSeconds, ShouldEqual, 10) + So(ds.UpdateIntervalSeconds, ShouldEqual, 15) ds2 := cfg[1] So(ds2.Name, ShouldEqual, "default") @@ -81,5 +81,5 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { So(len(ds2.Options), ShouldEqual, 1) So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds2.DisableDeletion, ShouldBeFalse) - So(ds2.UpdateIntervalSeconds, ShouldEqual, 3) + So(ds2.UpdateIntervalSeconds, ShouldEqual, 10) } diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index ef27ba97235..ea093860f3e 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ 
b/pkg/services/provisioning/dashboards/file_reader.go @@ -43,26 +43,6 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade log.Warn("[Deprecated] The folder property is deprecated. Please use path instead.") } - if _, err := os.Stat(path); os.IsNotExist(err) { - log.Error("Cannot read directory", "error", err) - } - - copy := path - path, err := filepath.Abs(path) - if err != nil { - log.Error("Could not create absolute path ", "path", path) - } - - path, err = filepath.EvalSymlinks(path) - if err != nil { - log.Error("Failed to read content of symlinked path: %s", path) - } - - if path == "" { - path = copy - log.Info("falling back to original path due to EvalSymlink/Abs failure") - } - return &fileReader{ Cfg: cfg, Path: path, @@ -99,7 +79,8 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error { } func (fr *fileReader) startWalkingDisk() error { - if _, err := os.Stat(fr.Path); err != nil { + resolvedPath := fr.resolvePath(fr.Path) + if _, err := os.Stat(resolvedPath); err != nil { if os.IsNotExist(err) { return err } @@ -116,7 +97,7 @@ func (fr *fileReader) startWalkingDisk() error { } filesFoundOnDisk := map[string]os.FileInfo{} - err = filepath.Walk(fr.Path, createWalkFn(filesFoundOnDisk)) + err = filepath.Walk(resolvedPath, createWalkFn(filesFoundOnDisk)) if err != nil { return err } @@ -156,7 +137,7 @@ func (fr *fileReader) deleteDashboardIfFileIsMissing(provisionedDashboardRefs ma cmd := &models.DeleteDashboardCommand{OrgId: fr.Cfg.OrgId, Id: dashboardId} err := bus.Dispatch(cmd) if err != nil { - fr.log.Error("failed to delete dashboard", "id", cmd.Id) + fr.log.Error("failed to delete dashboard", "id", cmd.Id, "error", err) } } } @@ -344,6 +325,29 @@ func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, }, nil } +func (fr *fileReader) resolvePath(path string) string { + if _, err := os.Stat(path); os.IsNotExist(err) { + fr.log.Error("Cannot read directory", "error", err) + } + + copy := 
path + path, err := filepath.Abs(path) + if err != nil { + fr.log.Error("Could not create absolute path ", "path", path) + } + + path, err = filepath.EvalSymlinks(path) + if err != nil { + fr.log.Error("Failed to read content of symlinked path: %s", path) + } + + if path == "" { + path = copy + fr.log.Info("falling back to original path due to EvalSymlink/Abs failure") + } + return path +} + type provisioningMetadata struct { uid string title string diff --git a/pkg/services/provisioning/dashboards/file_reader_linux_test.go b/pkg/services/provisioning/dashboards/file_reader_linux_test.go index 9d4cdae8609..77f488ebcfb 100644 --- a/pkg/services/provisioning/dashboards/file_reader_linux_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_linux_test.go @@ -30,10 +30,11 @@ func TestProvsionedSymlinkedFolder(t *testing.T) { want, err := filepath.Abs(containingId) if err != nil { - t.Errorf("expected err to be nill") + t.Errorf("expected err to be nil") } - if reader.Path != want { - t.Errorf("got %s want %s", reader.Path, want) + resolvedPath := reader.resolvePath(reader.Path) + if resolvedPath != want { + t.Errorf("got %s want %s", resolvedPath, want) } } diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go index bdc1e95aafe..fe849816553 100644 --- a/pkg/services/provisioning/dashboards/file_reader_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_test.go @@ -67,7 +67,8 @@ func TestCreatingNewDashboardFileReader(t *testing.T) { reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) So(err, ShouldBeNil) - So(filepath.IsAbs(reader.Path), ShouldBeTrue) + resolvedPath := reader.resolvePath(reader.Path) + So(filepath.IsAbs(resolvedPath), ShouldBeTrue) }) }) } diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml 
b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml index e26c329f87c..c43c4a14c53 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml @@ -6,7 +6,7 @@ providers: folder: 'developers' editable: true disableDeletion: true - updateIntervalSeconds: 10 + updateIntervalSeconds: 15 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml index 69a317fb396..8b7b8991759 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml @@ -3,7 +3,7 @@ folder: 'developers' editable: true disableDeletion: true - updateIntervalSeconds: 10 + updateIntervalSeconds: 15 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml index 1bb9cb53b45..b532c9012ec 100644 --- a/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml +++ b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml @@ -4,7 +4,7 @@ # org_id: 1 # # list of datasources to insert/update depending -# # whats available in the datbase +# # what's available in the database #datasources: # # name of the datasource. 
Required # - name: Graphite diff --git a/pkg/services/rendering/http_mode.go b/pkg/services/rendering/http_mode.go index d47dfaeaae1..40259c44746 100644 --- a/pkg/services/rendering/http_mode.go +++ b/pkg/services/rendering/http_mode.go @@ -70,7 +70,7 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend return nil, ErrTimeout } - // if we didnt get a 200 response, something went wrong. + // if we didn't get a 200 response, something went wrong. if resp.StatusCode != http.StatusOK { rs.log.Error("Remote rendering request failed", "error", resp.Status) return nil, fmt.Errorf("Remote rendering request failed. %d: %s", resp.StatusCode, resp.Status) @@ -83,7 +83,7 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend defer out.Close() _, err = io.Copy(out, resp.Body) if err != nil { - // check that we didnt timeout while receiving the response. + // check that we didn't timeout while receiving the response. if reqContext.Err() == context.DeadlineExceeded { rs.log.Info("Rendering timed out") return nil, ErrTimeout diff --git a/pkg/services/rendering/interface.go b/pkg/services/rendering/interface.go index 85c139cfc04..39cb1ada0f5 100644 --- a/pkg/services/rendering/interface.go +++ b/pkg/services/rendering/interface.go @@ -13,15 +13,16 @@ var ErrNoRenderer = errors.New("No renderer plugin found nor is an external rend var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found") type Opts struct { - Width int - Height int - Timeout time.Duration - OrgId int64 - UserId int64 - OrgRole models.RoleType - Path string - Encoding string - Timezone string + Width int + Height int + Timeout time.Duration + OrgId int64 + UserId int64 + OrgRole models.RoleType + Path string + Encoding string + Timezone string + ConcurrentLimit int } type RenderResult struct { diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go index ff4a67cc9b6..0b4f23e93b4 100644 --- 
a/pkg/services/rendering/rendering.go +++ b/pkg/services/rendering/rendering.go @@ -24,12 +24,13 @@ func init() { } type RenderingService struct { - log log.Logger - pluginClient *plugin.Client - grpcPlugin pluginModel.RendererPlugin - pluginInfo *plugins.RendererPlugin - renderAction renderFunc - domain string + log log.Logger + pluginClient *plugin.Client + grpcPlugin pluginModel.RendererPlugin + pluginInfo *plugins.RendererPlugin + renderAction renderFunc + domain string + inProgressCount int Cfg *setting.Cfg `inject:""` } @@ -45,7 +46,7 @@ func (rs *RenderingService) Init() error { // set value used for domain attribute of renderKey cookie if rs.Cfg.RendererUrl != "" { - // RendererCallbackUrl has already been passed, it wont generate an error. + // RendererCallbackUrl has already been passed, it won't generate an error. u, _ := url.Parse(rs.Cfg.RendererCallbackUrl) rs.domain = u.Hostname() } else if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR { @@ -90,6 +91,18 @@ func (rs *RenderingService) Run(ctx context.Context) error { } func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) { + if rs.inProgressCount > opts.ConcurrentLimit { + return &RenderResult{ + FilePath: filepath.Join(setting.HomePath, "public/img/rendering_limit.png"), + }, nil + } + + defer func() { + rs.inProgressCount -= 1 + }() + + rs.inProgressCount += 1 + if rs.renderAction != nil { return rs.renderAction(ctx, opts) } else { diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index ba898769578..2f17402b80c 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -60,6 +60,10 @@ func deleteAlertByIdInternal(alertId int64, reason string, sess *DBSession) erro return err } + if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_id = ?", alertId); err != nil { + return err + } + return nil } @@ -275,6 +279,8 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error { } 
sess.ID(alert.Id).Update(&alert) + + cmd.Result = alert return nil }) } diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go index 19ed960638e..daaef945b96 100644 --- a/pkg/services/sqlstore/alert_notification.go +++ b/pkg/services/sqlstore/alert_notification.go @@ -3,6 +3,7 @@ package sqlstore import ( "bytes" "context" + "errors" "fmt" "strings" "time" @@ -18,16 +19,23 @@ func init() { bus.AddHandler("sql", DeleteAlertNotification) bus.AddHandler("sql", GetAlertNotificationsToSend) bus.AddHandler("sql", GetAllAlertNotifications) - bus.AddHandlerCtx("sql", RecordNotificationJournal) - bus.AddHandlerCtx("sql", GetLatestNotification) - bus.AddHandlerCtx("sql", CleanNotificationJournal) + bus.AddHandlerCtx("sql", GetOrCreateAlertNotificationState) + bus.AddHandlerCtx("sql", SetAlertNotificationStateToCompleteCommand) + bus.AddHandlerCtx("sql", SetAlertNotificationStateToPendingCommand) } func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error { return inTransaction(func(sess *DBSession) error { sql := "DELETE FROM alert_notification WHERE alert_notification.org_id = ? AND alert_notification.id = ?" - _, err := sess.Exec(sql, cmd.OrgId, cmd.Id) - return err + if _, err := sess.Exec(sql, cmd.OrgId, cmd.Id); err != nil { + return err + } + + if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_notification_state.org_id = ? 
AND alert_notification_state.notifier_id = ?", cmd.OrgId, cmd.Id); err != nil { + return err + } + + return nil }) } @@ -229,49 +237,123 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error { }) } -func RecordNotificationJournal(ctx context.Context, cmd *m.RecordNotificationJournalCommand) error { +func SetAlertNotificationStateToCompleteCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToCompleteCommand) error { return inTransactionCtx(ctx, func(sess *DBSession) error { - journalEntry := &m.AlertNotificationJournal{ - OrgId: cmd.OrgId, - AlertId: cmd.AlertId, - NotifierId: cmd.NotifierId, - SentAt: cmd.SentAt, - Success: cmd.Success, - } + version := cmd.Version + var current m.AlertNotificationState + sess.ID(cmd.Id).Get(¤t) - if _, err := sess.Insert(journalEntry); err != nil { - return err - } + newVersion := cmd.Version + 1 - return nil - }) -} + sql := `UPDATE alert_notification_state SET + state = ?, + version = ?, + updated_at = ? + WHERE + id = ?` -func GetLatestNotification(ctx context.Context, cmd *m.GetLatestNotificationQuery) error { - return inTransactionCtx(ctx, func(sess *DBSession) error { - nj := &m.AlertNotificationJournal{} - - _, err := sess.Desc("alert_notification_journal.sent_at"). - Limit(1). - Where("alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?", cmd.OrgId, cmd.AlertId, cmd.NotifierId).Get(nj) + _, err := sess.Exec(sql, m.AlertNotificationStateCompleted, newVersion, timeNow().Unix(), cmd.Id) if err != nil { return err } - if nj.AlertId == 0 && nj.Id == 0 && nj.NotifierId == 0 && nj.OrgId == 0 { - return m.ErrJournalingNotFound + if current.Version != version { + sqlog.Error("notification state out of sync. 
the notification is marked as complete but has been modified between set as pending and completion.", "notifierId", current.NotifierId) } - cmd.Result = nj return nil }) } -func CleanNotificationJournal(ctx context.Context, cmd *m.CleanNotificationJournalCommand) error { - return inTransactionCtx(ctx, func(sess *DBSession) error { - sql := "DELETE FROM alert_notification_journal WHERE alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?" - _, err := sess.Exec(sql, cmd.OrgId, cmd.AlertId, cmd.NotifierId) - return err +func SetAlertNotificationStateToPendingCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToPendingCommand) error { + return withDbSession(ctx, func(sess *DBSession) error { + newVersion := cmd.Version + 1 + sql := `UPDATE alert_notification_state SET + state = ?, + version = ?, + updated_at = ?, + alert_rule_state_updated_version = ? + WHERE + id = ? AND + (version = ? OR alert_rule_state_updated_version < ?)` + + res, err := sess.Exec(sql, + m.AlertNotificationStatePending, + newVersion, + timeNow().Unix(), + cmd.AlertRuleStateUpdatedVersion, + cmd.Id, + cmd.Version, + cmd.AlertRuleStateUpdatedVersion) + + if err != nil { + return err + } + + affected, _ := res.RowsAffected() + if affected == 0 { + return m.ErrAlertNotificationStateVersionConflict + } + + cmd.ResultVersion = newVersion + + return nil }) } + +func GetOrCreateAlertNotificationState(ctx context.Context, cmd *m.GetOrCreateNotificationStateQuery) error { + return inTransactionCtx(ctx, func(sess *DBSession) error { + nj := &m.AlertNotificationState{} + + exist, err := getAlertNotificationState(sess, cmd, nj) + + // if exists, return it, otherwise create it with default values + if err != nil { + return err + } + + if exist { + cmd.Result = nj + return nil + } + + notificationState := &m.AlertNotificationState{ + OrgId: cmd.OrgId, + AlertId: cmd.AlertId, + NotifierId: cmd.NotifierId, + State: 
m.AlertNotificationStateUnknown, + UpdatedAt: timeNow().Unix(), + } + + if _, err := sess.Insert(notificationState); err != nil { + if dialect.IsUniqueConstraintViolation(err) { + exist, err = getAlertNotificationState(sess, cmd, nj) + + if err != nil { + return err + } + + if !exist { + return errors.New("Should not happen") + } + + cmd.Result = nj + return nil + } + + return err + } + + cmd.Result = notificationState + return nil + }) +} + +func getAlertNotificationState(sess *DBSession, cmd *m.GetOrCreateNotificationStateQuery, nj *m.AlertNotificationState) (bool, error) { + return sess. + Where("alert_notification_state.org_id = ?", cmd.OrgId). + Where("alert_notification_state.alert_id = ?", cmd.AlertId). + Where("alert_notification_state.notifier_id = ?", cmd.NotifierId). + Get(nj) +} diff --git a/pkg/services/sqlstore/alert_notification_test.go b/pkg/services/sqlstore/alert_notification_test.go index 83fb42db9bb..ed682bae5c6 100644 --- a/pkg/services/sqlstore/alert_notification_test.go +++ b/pkg/services/sqlstore/alert_notification_test.go @@ -6,7 +6,7 @@ import ( "time" "github.com/grafana/grafana/pkg/components/simplejson" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" . 
"github.com/smartystreets/goconvey/convey" ) @@ -14,50 +14,133 @@ func TestAlertNotificationSQLAccess(t *testing.T) { Convey("Testing Alert notification sql access", t, func() { InitTestDB(t) - Convey("Alert notification journal", func() { - var alertId int64 = 5 - var orgId int64 = 5 - var notifierId int64 = 5 + Convey("Alert notification state", func() { + var alertID int64 = 7 + var orgID int64 = 5 + var notifierID int64 = 10 + oldTimeNow := timeNow + now := time.Date(2018, 9, 30, 0, 0, 0, 0, time.UTC) + timeNow = func() time.Time { return now } - Convey("Getting last journal should raise error if no one exists", func() { - query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId} - err := GetLatestNotification(context.Background(), query) - So(err, ShouldEqual, m.ErrJournalingNotFound) + Convey("Get no existing state should create a new state", func() { + query := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err := GetOrCreateAlertNotificationState(context.Background(), query) + So(err, ShouldBeNil) + So(query.Result, ShouldNotBeNil) + So(query.Result.State, ShouldEqual, "unknown") + So(query.Result.Version, ShouldEqual, 0) + So(query.Result.UpdatedAt, ShouldEqual, now.Unix()) - Convey("shoulbe be able to record two journaling events", func() { - createCmd := &m.RecordNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId, Success: true, SentAt: 1} - - err := RecordNotificationJournal(context.Background(), createCmd) + Convey("Get existing state should not create a new state", func() { + query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err := GetOrCreateAlertNotificationState(context.Background(), query2) So(err, ShouldBeNil) + So(query2.Result, ShouldNotBeNil) + So(query2.Result.Id, ShouldEqual, query.Result.Id) + So(query2.Result.UpdatedAt, ShouldEqual, now.Unix()) + }) - 
createCmd.SentAt += 1000 //increase epoch + Convey("Update existing state to pending with correct version should update database", func() { + s := *query.Result - err = RecordNotificationJournal(context.Background(), createCmd) + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) So(err, ShouldBeNil) + So(cmd.ResultVersion, ShouldEqual, 1) - Convey("get last journaling event", func() { - err := GetLatestNotification(context.Background(), query) + query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query2) + So(err, ShouldBeNil) + So(query2.Result.Version, ShouldEqual, 1) + So(query2.Result.State, ShouldEqual, models.AlertNotificationStatePending) + So(query2.Result.UpdatedAt, ShouldEqual, now.Unix()) + + Convey("Update existing state to completed should update database", func() { + s := *query.Result + setStateCmd := models.SetAlertNotificationStateToCompleteCommand{ + Id: s.Id, + Version: cmd.ResultVersion, + } + err := SetAlertNotificationStateToCompleteCommand(context.Background(), &setStateCmd) So(err, ShouldBeNil) - So(query.Result.SentAt, ShouldEqual, 1001) - Convey("be able to clear all journaling for an notifier", func() { - cmd := &m.CleanNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId} - err := CleanNotificationJournal(context.Background(), cmd) - So(err, ShouldBeNil) + query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query3) + So(err, ShouldBeNil) + So(query3.Result.Version, ShouldEqual, 2) + So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted) + So(query3.Result.UpdatedAt, 
ShouldEqual, now.Unix()) + }) - Convey("querying for last junaling should raise error", func() { - query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId} - err := GetLatestNotification(context.Background(), query) - So(err, ShouldEqual, m.ErrJournalingNotFound) - }) - }) + Convey("Update existing state to completed should update database. regardless of version", func() { + s := *query.Result + unknownVersion := int64(1000) + cmd := models.SetAlertNotificationStateToCompleteCommand{ + Id: s.Id, + Version: unknownVersion, + } + err := SetAlertNotificationStateToCompleteCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + + query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query3) + So(err, ShouldBeNil) + So(query3.Result.Version, ShouldEqual, unknownVersion+1) + So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted) + So(query3.Result.UpdatedAt, ShouldEqual, now.Unix()) }) }) + + Convey("Update existing state to pending with incorrect version should return version mismatch error", func() { + s := *query.Result + s.Version = 1000 + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.NotifierId, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldEqual, models.ErrAlertNotificationStateVersionConflict) + }) + + Convey("Updating existing state to pending with incorrect version since alert rule state update version is higher", func() { + s := *query.Result + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: 1000, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + + So(cmd.ResultVersion, ShouldEqual, 1) + }) + + 
Convey("different version and same alert state change version should return error", func() { + s := *query.Result + s.Version = 1000 + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldNotBeNil) + }) + }) + + Reset(func() { + timeNow = oldTimeNow }) }) Convey("Alert notifications should be empty", func() { - cmd := &m.GetAlertNotificationsQuery{ + cmd := &models.GetAlertNotificationsQuery{ OrgId: 2, Name: "email", } @@ -68,7 +151,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Cannot save alert notifier with send reminder = true", func() { - cmd := &m.CreateAlertNotificationCommand{ + cmd := &models.CreateAlertNotificationCommand{ Name: "ops", Type: "email", OrgId: 1, @@ -78,7 +161,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { Convey("and missing frequency", func() { err := CreateAlertNotificationCommand(cmd) - So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound) + So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound) }) Convey("invalid frequency", func() { @@ -90,7 +173,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Cannot update alert notifier with send reminder = false", func() { - cmd := &m.CreateAlertNotificationCommand{ + cmd := &models.CreateAlertNotificationCommand{ Name: "ops update", Type: "email", OrgId: 1, @@ -101,14 +184,14 @@ func TestAlertNotificationSQLAccess(t *testing.T) { err := CreateAlertNotificationCommand(cmd) So(err, ShouldBeNil) - updateCmd := &m.UpdateAlertNotificationCommand{ + updateCmd := &models.UpdateAlertNotificationCommand{ Id: cmd.Result.Id, SendReminder: true, } Convey("and missing frequency", func() { err := UpdateAlertNotification(updateCmd) - So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound) + So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound) }) 
Convey("invalid frequency", func() { @@ -121,7 +204,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can save Alert Notification", func() { - cmd := &m.CreateAlertNotificationCommand{ + cmd := &models.CreateAlertNotificationCommand{ Name: "ops", Type: "email", OrgId: 1, @@ -143,7 +226,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can update alert notification", func() { - newCmd := &m.UpdateAlertNotificationCommand{ + newCmd := &models.UpdateAlertNotificationCommand{ Name: "NewName", Type: "webhook", OrgId: cmd.Result.OrgId, @@ -159,7 +242,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can update alert notification to disable sending of reminders", func() { - newCmd := &m.UpdateAlertNotificationCommand{ + newCmd := &models.UpdateAlertNotificationCommand{ Name: "NewName", Type: "webhook", OrgId: cmd.Result.OrgId, @@ -174,12 +257,12 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can search using an array of ids", func() { - cmd1 := m.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - cmd2 := m.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - cmd3 := m.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - cmd4 := m.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd1 := models.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd2 := models.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd3 := 
models.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd4 := models.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - otherOrg := m.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + otherOrg := models.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} So(CreateAlertNotificationCommand(&cmd1), ShouldBeNil) So(CreateAlertNotificationCommand(&cmd2), ShouldBeNil) @@ -188,7 +271,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { So(CreateAlertNotificationCommand(&otherOrg), ShouldBeNil) Convey("search", func() { - query := &m.GetAlertNotificationsToSendQuery{ + query := &models.GetAlertNotificationsToSendQuery{ Ids: []int64{cmd1.Result.Id, cmd2.Result.Id, 112341231}, OrgId: 1, } @@ -199,7 +282,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("all", func() { - query := &m.GetAllAlertNotificationsQuery{ + query := &models.GetAllAlertNotificationsQuery{ OrgId: 1, } diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go b/pkg/services/sqlstore/dashboard_service_integration_test.go index a9658f7ab76..a4e76aca340 100644 --- a/pkg/services/sqlstore/dashboard_service_integration_test.go +++ b/pkg/services/sqlstore/dashboard_service_integration_test.go @@ -932,29 +932,6 @@ func TestIntegratedDashboardService(t *testing.T) { }) } -type scenarioContext struct { - dashboardGuardianMock *guardian.FakeDashboardGuardian -} - -type scenarioFunc func(c *scenarioContext) - -func dashboardGuardianScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) { - Convey(desc, func() { - origNewDashboardGuardian := guardian.New - 
guardian.MockDashboardGuardian(mock) - - sc := &scenarioContext{ - dashboardGuardianMock: mock, - } - - defer func() { - guardian.New = origNewDashboardGuardian - }() - - fn(sc) - }) -} - type dashboardPermissionScenarioContext struct { dashboardGuardianMock *guardian.FakeDashboardGuardian } diff --git a/pkg/services/sqlstore/migrations/alert_mig.go b/pkg/services/sqlstore/migrations/alert_mig.go index e27e64c6124..cadcccf6c95 100644 --- a/pkg/services/sqlstore/migrations/alert_mig.go +++ b/pkg/services/sqlstore/migrations/alert_mig.go @@ -107,4 +107,27 @@ func addAlertMigrations(mg *Migrator) { mg.AddMigration("create notification_journal table v1", NewAddTableMigration(notification_journal)) mg.AddMigration("add index notification_journal org_id & alert_id & notifier_id", NewAddIndexMigration(notification_journal, notification_journal.Indices[0])) + + mg.AddMigration("drop alert_notification_journal", NewDropTableMigration("alert_notification_journal")) + + alert_notification_state := Table{ + Name: "alert_notification_state", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "org_id", Type: DB_BigInt, Nullable: false}, + {Name: "alert_id", Type: DB_BigInt, Nullable: false}, + {Name: "notifier_id", Type: DB_BigInt, Nullable: false}, + {Name: "state", Type: DB_NVarchar, Length: 50, Nullable: false}, + {Name: "version", Type: DB_BigInt, Nullable: false}, + {Name: "updated_at", Type: DB_BigInt, Nullable: false}, + {Name: "alert_rule_state_updated_version", Type: DB_BigInt, Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"org_id", "alert_id", "notifier_id"}, Type: UniqueIndex}, + }, + } + + mg.AddMigration("create alert_notification_state table v1", NewAddTableMigration(alert_notification_state)) + mg.AddMigration("add index alert_notification_state org_id & alert_id & notifier_id", + NewAddIndexMigration(alert_notification_state, alert_notification_state.Indices[0])) } diff --git 
a/pkg/services/sqlstore/migrations/annotation_mig.go b/pkg/services/sqlstore/migrations/annotation_mig.go index d231d3283e2..49920dee490 100644 --- a/pkg/services/sqlstore/migrations/annotation_mig.go +++ b/pkg/services/sqlstore/migrations/annotation_mig.go @@ -105,7 +105,7 @@ func addAnnotationMig(mg *Migrator) { })) // - // Convert epoch saved as seconds to miliseconds + // Convert epoch saved as seconds to milliseconds // updateEpochSql := "UPDATE annotation SET epoch = (epoch*1000) where epoch < 9999999999" mg.AddMigration("Convert existing annotations from seconds to milliseconds", NewRawSqlMigration(updateEpochSql)) diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go index 427d102b280..506a01c3ed8 100644 --- a/pkg/services/sqlstore/migrator/dialect.go +++ b/pkg/services/sqlstore/migrator/dialect.go @@ -44,6 +44,8 @@ type Dialect interface { CleanDB() error NoOpSql() string + + IsUniqueConstraintViolation(err error) bool } func NewDialect(engine *xorm.Engine) Dialect { diff --git a/pkg/services/sqlstore/migrator/mysql_dialect.go b/pkg/services/sqlstore/migrator/mysql_dialect.go index 1ed16871c15..7daa4597430 100644 --- a/pkg/services/sqlstore/migrator/mysql_dialect.go +++ b/pkg/services/sqlstore/migrator/mysql_dialect.go @@ -5,6 +5,8 @@ import ( "strconv" "strings" + "github.com/VividCortex/mysqlerr" + "github.com/go-sql-driver/mysql" "github.com/go-xorm/xorm" ) @@ -125,3 +127,13 @@ func (db *Mysql) CleanDB() error { return nil } + +func (db *Mysql) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(*mysql.MySQLError); ok { + if driverErr.Number == mysqlerr.ER_DUP_ENTRY { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/migrator/postgres_dialect.go b/pkg/services/sqlstore/migrator/postgres_dialect.go index eae9ad3ca3f..ab8812a1e26 100644 --- a/pkg/services/sqlstore/migrator/postgres_dialect.go +++ b/pkg/services/sqlstore/migrator/postgres_dialect.go @@ -6,6 +6,7 @@ 
import ( "strings" "github.com/go-xorm/xorm" + "github.com/lib/pq" ) type Postgres struct { @@ -136,3 +137,13 @@ func (db *Postgres) CleanDB() error { return nil } + +func (db *Postgres) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(*pq.Error); ok { + if driverErr.Code == "23505" { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/migrator/sqlite_dialect.go b/pkg/services/sqlstore/migrator/sqlite_dialect.go index 01082b95c88..446e3fcef12 100644 --- a/pkg/services/sqlstore/migrator/sqlite_dialect.go +++ b/pkg/services/sqlstore/migrator/sqlite_dialect.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/go-xorm/xorm" + sqlite3 "github.com/mattn/go-sqlite3" ) type Sqlite3 struct { @@ -82,3 +83,13 @@ func (db *Sqlite3) DropIndexSql(tableName string, index *Index) string { func (db *Sqlite3) CleanDB() error { return nil } + +func (db *Sqlite3) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(sqlite3.Error); ok { + if driverErr.ExtendedCode == sqlite3.ErrConstraintUnique { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/transactions_test.go b/pkg/services/sqlstore/transactions_test.go index 937649921ba..041359cf1d3 100644 --- a/pkg/services/sqlstore/transactions_test.go +++ b/pkg/services/sqlstore/transactions_test.go @@ -10,10 +10,6 @@ import ( . 
"github.com/smartystreets/goconvey/convey" ) -type testQuery struct { - result bool -} - var ProvokedError = errors.New("testing error.") func TestTransaction(t *testing.T) { @@ -39,7 +35,7 @@ func TestTransaction(t *testing.T) { So(err, ShouldEqual, models.ErrInvalidApiKey) }) - Convey("wont update if one handler fails", func() { + Convey("won't update if one handler fails", func() { err := ss.InTransaction(context.Background(), func(ctx context.Context) error { err := DeleteApiKeyCtx(ctx, deleteApiKeyCmd) if err != nil { diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 6bd30be1869..848a11d81ab 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -271,9 +271,6 @@ func ChangeUserPassword(cmd *m.ChangeUserPasswordCommand) error { func UpdateUserLastSeenAt(cmd *m.UpdateUserLastSeenAtCommand) error { return inTransaction(func(sess *DBSession) error { - if cmd.UserId <= 0 { - } - user := m.User{ Id: cmd.UserId, LastSeenAt: time.Now(), diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 1a253b9b238..27df73a9eed 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -166,6 +166,7 @@ var ( // Alerting AlertingEnabled bool ExecuteAlerts bool + AlertingRenderLimit int AlertingErrorOrTimeout string AlertingNoDataOrNullValues string @@ -196,10 +197,13 @@ type Cfg struct { Smtp SmtpSettings // Rendering - ImagesDir string - PhantomDir string - RendererUrl string - RendererCallbackUrl string + ImagesDir string + PhantomDir string + RendererUrl string + RendererCallbackUrl string + RendererLimit int + RendererLimitAlerting int + DisableBruteForceLoginProtection bool TempDataLifetime time.Duration @@ -677,6 +681,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { alerting := iniFile.Section("alerting") AlertingEnabled = alerting.Key("enabled").MustBool(true) ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true) + AlertingRenderLimit = 
alerting.Key("concurrent_render_limit").MustInt(5) AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting") AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data") diff --git a/pkg/social/social.go b/pkg/social/social.go index 721070ab789..8918507f3b9 100644 --- a/pkg/social/social.go +++ b/pkg/social/social.go @@ -46,10 +46,14 @@ func (e *Error) Error() string { return e.s } +const ( + grafanaCom = "grafana_com" +) + var ( SocialBaseUrl = "/login/" SocialMap = make(map[string]SocialConnector) - allOauthes = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"} + allOauthes = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", grafanaCom} ) func NewOAuthService() { @@ -82,7 +86,7 @@ func NewOAuthService() { } if name == "grafananet" { - name = "grafana_com" + name = grafanaCom } setting.OAuthService.OAuthInfos[name] = info @@ -159,7 +163,7 @@ func NewOAuthService() { } } - if name == "grafana_com" { + if name == grafanaCom { config = oauth2.Config{ ClientID: info.ClientId, ClientSecret: info.ClientSecret, @@ -171,7 +175,7 @@ func NewOAuthService() { Scopes: info.Scopes, } - SocialMap["grafana_com"] = &SocialGrafanaCom{ + SocialMap[grafanaCom] = &SocialGrafanaCom{ SocialBase: &SocialBase{ Config: &config, log: logger, @@ -194,7 +198,7 @@ var GetOAuthProviders = func(cfg *setting.Cfg) map[string]bool { for _, name := range allOauthes { if name == "grafananet" { - name = "grafana_com" + name = grafanaCom } sec := cfg.Raw.Section("auth." 
+ name) diff --git a/pkg/tracing/tracing.go b/pkg/tracing/tracing.go index 61f45af3635..fd7258b7a0a 100644 --- a/pkg/tracing/tracing.go +++ b/pkg/tracing/tracing.go @@ -58,7 +58,8 @@ func (ts *TracingService) parseSettings() { func (ts *TracingService) initGlobalTracer() error { cfg := jaegercfg.Configuration{ - Disabled: !ts.enabled, + ServiceName: "grafana", + Disabled: !ts.enabled, Sampler: &jaegercfg.SamplerConfig{ Type: ts.samplerType, Param: ts.samplerParam, @@ -78,7 +79,7 @@ func (ts *TracingService) initGlobalTracer() error { options = append(options, jaegercfg.Tag(tag, value)) } - tracer, closer, err := cfg.New("grafana", options...) + tracer, closer, err := cfg.NewTracer(options...) if err != nil { return err } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 92352a51315..be14c6f96ec 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -196,7 +196,7 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatch params.ExtendedStatistics = query.ExtendedStatistics } - // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + // 1 minutes resolution metrics is stored for 15 days, 15 * 24 * 60 = 21600 if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { return nil, errors.New("too long query period") } @@ -267,7 +267,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi ScanBy: aws.String("TimestampAscending"), } for _, query := range queries { - // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + // 1 minutes resolution metrics is stored for 15 days, 15 * 24 * 60 = 21600 if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { return nil, errors.New("too long query period") } diff --git a/pkg/tsdb/cloudwatch/credentials.go b/pkg/tsdb/cloudwatch/credentials.go index 8b32c76daa3..165f8fdbe97 100644 --- 
a/pkg/tsdb/cloudwatch/credentials.go +++ b/pkg/tsdb/cloudwatch/credentials.go @@ -42,8 +42,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) { accessKeyId := "" secretAccessKey := "" sessionToken := "" - var expiration *time.Time - expiration = nil + var expiration *time.Time = nil if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 { params := &sts.AssumeRoleInput{ RoleArn: aws.String(dsInfo.AssumeRoleArn), diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index e1e131d9f3a..ee9d9583c4e 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -235,7 +235,7 @@ func parseMultiSelectValue(input string) []string { func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { regions := []string{ "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1", - "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", + "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", "us-isob-east-1", "us-iso-east-1", } result := make([]suggestData, 0) diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go index 78973b3faa6..4ebe0db8f89 100644 --- a/pkg/tsdb/elasticsearch/client/client.go +++ b/pkg/tsdb/elasticsearch/client/client.go @@ -144,7 +144,7 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte, payload.WriteString(body + "\n") } - elapsed := time.Now().Sub(start) + elapsed := time.Since(start) clientLog.Debug("Encoded batch requests to json", "took", elapsed) return payload.Bytes(), nil @@ -187,7 +187,7 @@ func (c 
*baseClientImpl) executeRequest(method, uriPath string, body []byte) (*h start := time.Now() defer func() { - elapsed := time.Now().Sub(start) + elapsed := time.Since(start) clientLog.Debug("Executed request", "took", elapsed) }() return ctxhttp.Do(c.ctx, httpClient, req) @@ -215,7 +215,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch return nil, err } - elapsed := time.Now().Sub(start) + elapsed := time.Since(start) clientLog.Debug("Decoded multisearch json response", "took", elapsed) msr.Status = res.StatusCode diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go index 11d1cdb1d71..540a999688a 100644 --- a/pkg/tsdb/elasticsearch/client/client_test.go +++ b/pkg/tsdb/elasticsearch/client/client_test.go @@ -25,7 +25,7 @@ func TestClient(t *testing.T) { JsonData: simplejson.NewFromAny(make(map[string]interface{})), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -36,11 +36,11 @@ func TestClient(t *testing.T) { }), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) - Convey("When unspported version set should return error", func() { + Convey("When unsupported version set should return error", func() { ds := &models.DataSource{ JsonData: simplejson.NewFromAny(map[string]interface{}{ "esVersion": 6, @@ -48,7 +48,7 @@ func TestClient(t *testing.T) { }), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -60,7 +60,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 2) }) @@ -73,7 +73,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) 
So(c.GetVersion(), ShouldEqual, 5) }) @@ -86,7 +86,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 56) }) diff --git a/pkg/tsdb/elasticsearch/client/search_request.go b/pkg/tsdb/elasticsearch/client/search_request.go index 2b833ce78d3..4c577a2c31d 100644 --- a/pkg/tsdb/elasticsearch/client/search_request.go +++ b/pkg/tsdb/elasticsearch/client/search_request.go @@ -56,9 +56,7 @@ func (b *SearchRequestBuilder) Build() (*SearchRequest, error) { if err != nil { return nil, err } - for _, agg := range aggArray { - sr.Aggs = append(sr.Aggs, agg) - } + sr.Aggs = append(sr.Aggs, aggArray...) } } @@ -112,7 +110,7 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder { return b.queryBuilder } -// Agg initaite and returns a new aggregation builder +// Agg initiate and returns a new aggregation builder func (b *SearchRequestBuilder) Agg() AggBuilder { aggBuilder := newAggBuilder() b.aggBuilders = append(b.aggBuilders, aggBuilder) @@ -300,9 +298,7 @@ func (b *aggBuilderImpl) Build() (AggArray, error) { return nil, err } - for _, childAgg := range childAggs { - agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAgg) - } + agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAggs...) 
} aggs = append(aggs, agg) diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index 7bdab60389c..0837c3dd9d5 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -13,6 +13,19 @@ import ( "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" ) +const ( + // Metric types + countType = "count" + percentilesType = "percentiles" + extendedStatsType = "extended_stats" + // Bucket types + dateHistType = "date_histogram" + histogramType = "histogram" + filtersType = "filters" + termsType = "terms" + geohashGridType = "geohash_grid" +) + type responseParser struct { Responses []*es.SearchResponse Targets []*Query @@ -81,7 +94,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu } if depth == maxDepth { - if aggDef.Type == "date_histogram" { + if aggDef.Type == dateHistType { err = rp.processMetrics(esAgg, target, series, props) } else { err = rp.processAggregationDocs(esAgg, aggDef, target, table, props) @@ -92,7 +105,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu } else { for _, b := range esAgg.Get("buckets").MustArray() { bucket := simplejson.NewFromAny(b) - newProps := make(map[string]string, 0) + newProps := make(map[string]string) for k, v := range props { newProps[k] = v @@ -122,7 +135,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu for _, bucketKey := range bucketKeys { bucket := simplejson.NewFromAny(buckets[bucketKey]) - newProps := make(map[string]string, 0) + newProps := make(map[string]string) for k, v := range props { newProps[k] = v @@ -149,7 +162,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } switch metric.Type { - case "count": + case countType: newSeries := tsdb.TimeSeries{ Tags: make(map[string]string), } @@ -164,10 +177,10 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target 
*Query, for k, v := range props { newSeries.Tags[k] = v } - newSeries.Tags["metric"] = "count" + newSeries.Tags["metric"] = countType *series = append(*series, &newSeries) - case "percentiles": + case percentilesType: buckets := esAgg.Get("buckets").MustArray() if len(buckets) == 0 { break @@ -198,7 +211,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } *series = append(*series, &newSeries) } - case "extended_stats": + case extendedStatsType: buckets := esAgg.Get("buckets").MustArray() metaKeys := make([]string, 0) @@ -312,10 +325,9 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef for _, metric := range target.Metrics { switch metric.Type { - case "count": + case countType: addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count"))) - break - case "extended_stats": + case extendedStatsType: metaKeys := make([]string, 0) meta := metric.Meta.MustMap() for k := range meta { @@ -355,7 +367,6 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef } addMetricValue(&values, metricName, castToNullFloat(bucket.GetPath(metric.ID, "value"))) - break } } @@ -368,7 +379,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) { var histogram *BucketAgg for _, bucketAgg := range target.BucketAggs { - if bucketAgg.Type == "date_histogram" { + if bucketAgg.Type == dateHistType { histogram = bucketAgg break } diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go index c9bb05dd09a..fddcf3cb8b3 100644 --- a/pkg/tsdb/elasticsearch/time_series_query.go +++ b/pkg/tsdb/elasticsearch/time_series_query.go @@ -75,15 +75,15 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) { // iterate backwards to create aggregations bottom-down for _, bucketAgg := range q.BucketAggs { switch 
bucketAgg.Type { - case "date_histogram": + case dateHistType: aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to) - case "histogram": + case histogramType: aggBuilder = addHistogramAgg(aggBuilder, bucketAgg) - case "filters": + case filtersType: aggBuilder = addFiltersAgg(aggBuilder, bucketAgg) - case "terms": + case termsType: aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics) - case "geohash_grid": + case geohashGridType: aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg) } } diff --git a/pkg/tsdb/influxdb/query_test.go b/pkg/tsdb/influxdb/query_test.go index f1270560269..cc1358a72d7 100644 --- a/pkg/tsdb/influxdb/query_test.go +++ b/pkg/tsdb/influxdb/query_test.go @@ -158,7 +158,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) { So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" < 10001`) }) - Convey("can render number greather then condition tags", func() { + Convey("can render number greater then condition tags", func() { query := &Query{Tags: []*Tag{{Operator: ">", Value: "10001", Key: "key"}}} So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" > 10001`) diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index f9525fc37ac..93d11c318f2 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -1,6 +1,7 @@ package mssql import ( + "context" "fmt" "math/rand" "strings" @@ -128,7 +129,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) @@ -218,7 +219,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -265,7 +266,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := 
endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -327,7 +328,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -352,7 +353,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -441,7 +442,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -463,7 +464,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -485,7 +486,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -507,7 +508,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -529,7 +530,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -551,7 +552,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err 
:= endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -573,7 +574,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -595,7 +596,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -617,7 +618,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -640,7 +641,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -663,7 +664,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -739,7 +740,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -816,7 +817,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -892,7 +893,7 @@ 
func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -915,7 +916,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -941,7 +942,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -971,7 +972,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1001,7 +1002,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1031,7 +1032,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1059,7 +1060,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1087,7 +1088,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) 
So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 13d9040a738..7c415024005 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -1,6 +1,7 @@ package mysql import ( + "context" "fmt" "math/rand" "strings" @@ -129,7 +130,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -217,7 +218,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -264,7 +265,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -327,7 +328,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -352,7 +353,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -378,7 +379,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -473,7 +474,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err 
:= endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -495,7 +496,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -517,7 +518,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -539,7 +540,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -561,7 +562,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -583,7 +584,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -605,7 +606,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -627,7 +628,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -649,7 +650,7 @@ func TestMySQL(t *testing.T) { }, } - resp, 
err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -671,7 +672,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -693,7 +694,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -716,7 +717,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -741,7 +742,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -802,7 +803,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -825,7 +826,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -851,7 +852,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] 
So(queryResult.Error, ShouldBeNil) @@ -881,7 +882,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -911,7 +912,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -941,7 +942,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -969,7 +970,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -997,7 +998,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index fc1a5f34253..c87691e6bbc 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -1,6 +1,7 @@ package postgres import ( + "context" "fmt" "math/rand" "strings" @@ -117,7 +118,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -197,7 +198,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := 
endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -254,7 +255,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -279,7 +280,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -333,7 +334,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -360,7 +361,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -450,7 +451,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -472,7 +473,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -494,7 +495,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -516,7 +517,7 @@ func TestPostgres(t 
*testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -538,7 +539,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -560,7 +561,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -582,7 +583,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -604,7 +605,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -626,7 +627,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -649,7 +650,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -674,7 +675,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] 
So(queryResult.Error, ShouldBeNil) @@ -735,7 +736,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -758,7 +759,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -784,7 +785,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -814,7 +815,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -844,7 +845,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -874,7 +875,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -902,7 +903,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -930,7 +931,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) 
+ resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/prometheus/prometheus.go b/pkg/tsdb/prometheus/prometheus.go index bf9fe9f152c..83bb683fccf 100644 --- a/pkg/tsdb/prometheus/prometheus.go +++ b/pkg/tsdb/prometheus/prometheus.go @@ -92,12 +92,12 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc return nil, err } - querys, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery) + queries, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery) if err != nil { return nil, err } - for _, query := range querys { + for _, query := range queries { timeRange := apiv1.Range{ Start: query.Start, End: query.End, diff --git a/pkg/tsdb/stackdriver/annotation_query.go b/pkg/tsdb/stackdriver/annotation_query.go new file mode 100644 index 00000000000..db35171ad70 --- /dev/null +++ b/pkg/tsdb/stackdriver/annotation_query.go @@ -0,0 +1,120 @@ +package stackdriver + +import ( + "context" + "strconv" + "strings" + "time" + + "github.com/grafana/grafana/pkg/tsdb" +) + +func (e *StackdriverExecutor) executeAnnotationQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + firstQuery := tsdbQuery.Queries[0] + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + queryRes, resp, err := e.executeQuery(ctx, queries[0], tsdbQuery) + if err != nil { + return nil, err + } + title := firstQuery.Model.Get("title").MustString() + text := firstQuery.Model.Get("text").MustString() + tags := firstQuery.Model.Get("tags").MustString() + err = e.parseToAnnotations(queryRes, resp, queries[0], title, text, tags) + result.Results[firstQuery.RefId] = queryRes + + return result, err +} + +func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data StackdriverResponse, query 
*StackdriverQuery, title string, text string, tags string) error { + annotations := make([]map[string]string, 0) + + for _, series := range data.TimeSeries { + // reverse the order to be ascending + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + value := strconv.FormatFloat(point.Value.DoubleValue, 'f', 6, 64) + if series.ValueType == "STRING" { + value = point.Value.StringValue + } + annotation := make(map[string]string) + annotation["time"] = point.Interval.EndTime.UTC().Format(time.RFC3339) + annotation["title"] = formatAnnotationText(title, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels) + annotation["tags"] = tags + annotation["text"] = formatAnnotationText(text, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels) + annotations = append(annotations, annotation) + } + } + + transformAnnotationToTable(annotations, queryRes) + return nil +} + +func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) { + table := &tsdb.Table{ + Columns: make([]tsdb.TableColumn, 4), + Rows: make([]tsdb.RowValues, 0), + } + table.Columns[0].Text = "time" + table.Columns[1].Text = "title" + table.Columns[2].Text = "tags" + table.Columns[3].Text = "text" + + for _, r := range data { + values := make([]interface{}, 4) + values[0] = r["time"] + values[1] = r["title"] + values[2] = r["tags"] + values[3] = r["text"] + table.Rows = append(table.Rows, values) + } + result.Tables = append(result.Tables, table) + result.Meta.Set("rowCount", len(data)) + slog.Info("anno", "len", len(data)) +} + +func formatAnnotationText(annotationText string, pointValue string, metricType string, metricLabels map[string]string, resourceLabels map[string]string) string { + result := legendKeyFormat.ReplaceAllFunc([]byte(annotationText), func(in []byte) []byte { + metaPartName := strings.Replace(string(in), "{{", "", 1) + metaPartName = strings.Replace(metaPartName, "}}", "", 1) + metaPartName = 
strings.TrimSpace(metaPartName) + + if metaPartName == "metric.type" { + return []byte(metricType) + } + + metricPart := replaceWithMetricPart(metaPartName, metricType) + + if metricPart != nil { + return metricPart + } + + if metaPartName == "metric.value" { + return []byte(pointValue) + } + + metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1) + + if val, exists := metricLabels[metaPartName]; exists { + return []byte(val) + } + + metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1) + + if val, exists := resourceLabels[metaPartName]; exists { + return []byte(val) + } + + return in + }) + + return string(result) +} diff --git a/pkg/tsdb/stackdriver/annotation_query_test.go b/pkg/tsdb/stackdriver/annotation_query_test.go new file mode 100644 index 00000000000..8229470d665 --- /dev/null +++ b/pkg/tsdb/stackdriver/annotation_query_test.go @@ -0,0 +1,33 @@ +package stackdriver + +import ( + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestStackdriverAnnotationQuery(t *testing.T) { + Convey("Stackdriver Annotation Query Executor", t, func() { + executor := &StackdriverExecutor{} + Convey("When parsing the stackdriver api response", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"} + query := &StackdriverQuery{} + err = executor.parseToAnnotations(res, data, query, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag") + So(err, ShouldBeNil) + + Convey("Should return annotations table", func() { + So(len(res.Tables), ShouldEqual, 1) + So(len(res.Tables[0].Rows), ShouldEqual, 9) + So(res.Tables[0].Rows[0][1], ShouldEqual, "atitle collector-asia-east-1 9.856650") + So(res.Tables[0].Rows[0][3], ShouldEqual, "atext asia-east1-a") + }) + }) + }) +} diff --git a/pkg/tsdb/stackdriver/stackdriver.go b/pkg/tsdb/stackdriver/stackdriver.go new file mode 100644 index 00000000000..586e154cd5d --- /dev/null +++ b/pkg/tsdb/stackdriver/stackdriver.go @@ -0,0 +1,460 @@ +package stackdriver + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "math" + "net/http" + "net/url" + "path" + "regexp" + "strconv" + "strings" + "time" + + "golang.org/x/net/context/ctxhttp" + + "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/opentracing/opentracing-go" +) + +var ( + slog log.Logger + legendKeyFormat *regexp.Regexp + metricNameFormat *regexp.Regexp +) + +// StackdriverExecutor executes queries for the 
Stackdriver datasource +type StackdriverExecutor struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +// NewStackdriverExecutor initializes a http client +func NewStackdriverExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + httpClient, err := dsInfo.GetHttpClient() + if err != nil { + return nil, err + } + + return &StackdriverExecutor{ + httpClient: httpClient, + dsInfo: dsInfo, + }, nil +} + +func init() { + slog = log.New("tsdb.stackdriver") + tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewStackdriverExecutor) + legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) + metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.googleapis\.com/(.+)`) +} + +// Query takes in the frontend queries, parses them into the Stackdriver query format +// executes the queries against the Stackdriver API and parses the response into +// the time series or table format +func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + var result *tsdb.Response + var err error + queryType := tsdbQuery.Queries[0].Model.Get("type").MustString("") + + switch queryType { + case "annotationQuery": + result, err = e.executeAnnotationQuery(ctx, tsdbQuery) + case "timeSeriesQuery": + fallthrough + default: + result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery) + } + + return result, err +} + +func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + for _, query := range queries { + queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery) + if err != nil { + return nil, err + } + err = e.parseResponse(queryRes, resp, query) + if err != nil { + queryRes.Error = err + } + result.Results[query.RefID] = queryRes + } + + return result, nil +} 
+ +func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) { + stackdriverQueries := []*StackdriverQuery{} + + startTime, err := tsdbQuery.TimeRange.ParseFrom() + if err != nil { + return nil, err + } + + endTime, err := tsdbQuery.TimeRange.ParseTo() + if err != nil { + return nil, err + } + + durationSeconds := int(endTime.Sub(startTime).Seconds()) + + for _, query := range tsdbQuery.Queries { + var target string + + metricType := query.Model.Get("metricType").MustString() + filterParts := query.Model.Get("filters").MustArray() + + params := url.Values{} + params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339)) + params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339)) + params.Add("filter", buildFilterString(metricType, filterParts)) + params.Add("view", query.Model.Get("view").MustString("FULL")) + setAggParams(¶ms, query, durationSeconds) + + target = params.Encode() + + if setting.Env == setting.DEV { + slog.Debug("Stackdriver request", "params", params) + } + + groupBys := query.Model.Get("groupBys").MustArray() + groupBysAsStrings := make([]string, 0) + for _, groupBy := range groupBys { + groupBysAsStrings = append(groupBysAsStrings, groupBy.(string)) + } + + aliasBy := query.Model.Get("aliasBy").MustString() + + stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{ + Target: target, + Params: params, + RefID: query.RefId, + GroupBys: groupBysAsStrings, + AliasBy: aliasBy, + }) + } + + return stackdriverQueries, nil +} + +func buildFilterString(metricType string, filterParts []interface{}) string { + filterString := "" + for i, part := range filterParts { + mod := i % 4 + if part == "AND" { + filterString += " " + } else if mod == 2 { + filterString += fmt.Sprintf(`"%s"`, part) + } else { + filterString += part.(string) + } + } + return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ") +} + +func setAggParams(params *url.Values, query *tsdb.Query, 
durationSeconds int) { + primaryAggregation := query.Model.Get("primaryAggregation").MustString() + perSeriesAligner := query.Model.Get("perSeriesAligner").MustString() + alignmentPeriod := query.Model.Get("alignmentPeriod").MustString() + + if primaryAggregation == "" { + primaryAggregation = "REDUCE_NONE" + } + + if perSeriesAligner == "" { + perSeriesAligner = "ALIGN_MEAN" + } + + if alignmentPeriod == "grafana-auto" || alignmentPeriod == "" { + alignmentPeriodValue := int(math.Max(float64(query.IntervalMs)/1000, 60.0)) + alignmentPeriod = "+" + strconv.Itoa(alignmentPeriodValue) + "s" + } + + if alignmentPeriod == "stackdriver-auto" { + alignmentPeriodValue := int(math.Max(float64(durationSeconds), 60.0)) + if alignmentPeriodValue < 60*60*23 { + alignmentPeriod = "+60s" + } else if alignmentPeriodValue < 60*60*24*6 { + alignmentPeriod = "+300s" + } else { + alignmentPeriod = "+3600s" + } + } + + re := regexp.MustCompile("[0-9]+") + seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod), 10, 64) + if err != nil || seconds > 3600 { + alignmentPeriod = "+3600s" + } + + params.Add("aggregation.crossSeriesReducer", primaryAggregation) + params.Add("aggregation.perSeriesAligner", perSeriesAligner) + params.Add("aggregation.alignmentPeriod", alignmentPeriod) + + groupBys := query.Model.Get("groupBys").MustArray() + if len(groupBys) > 0 { + for i := 0; i < len(groupBys); i++ { + params.Add("aggregation.groupByFields", groupBys[i].(string)) + } + } +} + +func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, StackdriverResponse, error) { + queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} + + req, err := e.createRequest(ctx, e.dsInfo) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + req.URL.RawQuery = query.Params.Encode() + queryResult.Meta.Set("rawQuery", req.URL.RawQuery) + alignmentPeriod, ok := 
req.URL.Query()["aggregation.alignmentPeriod"] + + if ok { + re := regexp.MustCompile("[0-9]+") + seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod[0]), 10, 64) + if err == nil { + queryResult.Meta.Set("alignmentPeriod", seconds) + } + } + + span, ctx := opentracing.StartSpanFromContext(ctx, "stackdriver query") + span.SetTag("target", query.Target) + span.SetTag("from", tsdbQuery.TimeRange.From) + span.SetTag("until", tsdbQuery.TimeRange.To) + span.SetTag("datasource_id", e.dsInfo.Id) + span.SetTag("org_id", e.dsInfo.OrgId) + + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + res, err := ctxhttp.Do(ctx, e.httpClient, req) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + data, err := e.unmarshalResponse(res) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + return queryResult, data, nil +} + +func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (StackdriverResponse, error) { + body, err := ioutil.ReadAll(res.Body) + defer res.Body.Close() + if err != nil { + return StackdriverResponse{}, err + } + + if res.StatusCode/100 != 2 { + slog.Error("Request failed", "status", res.Status, "body", string(body)) + return StackdriverResponse{}, fmt.Errorf(string(body)) + } + + var data StackdriverResponse + err = json.Unmarshal(body, &data) + if err != nil { + slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body)) + return StackdriverResponse{}, err + } + + return data, nil +} + +func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error { + metricLabels := make(map[string][]string) + resourceLabels := make(map[string][]string) + + for _, series := range data.TimeSeries { + points := make([]tsdb.TimePoint, 0) + + // 
reverse the order to be ascending + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + value := point.Value.DoubleValue + + if series.ValueType == "INT64" { + parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64) + if err == nil { + value = parsedValue + } + } + + if series.ValueType == "BOOL" { + if point.Value.BoolValue { + value = 1 + } else { + value = 0 + } + } + + points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000)) + } + + defaultMetricName := series.Metric.Type + + for key, value := range series.Metric.Labels { + if !containsLabel(metricLabels[key], value) { + metricLabels[key] = append(metricLabels[key], value) + } + if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) { + defaultMetricName += " " + value + } + } + + for key, value := range series.Resource.Labels { + if !containsLabel(resourceLabels[key], value) { + resourceLabels[key] = append(resourceLabels[key], value) + } + + if containsLabel(query.GroupBys, "resource.label."+key) { + defaultMetricName += " " + value + } + } + + metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query) + + queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ + Name: metricName, + Points: points, + }) + } + + queryRes.Meta.Set("resourceLabels", resourceLabels) + queryRes.Meta.Set("metricLabels", metricLabels) + queryRes.Meta.Set("groupBys", query.GroupBys) + + return nil +} + +func containsLabel(labels []string, newLabel string) bool { + for _, val := range labels { + if val == newLabel { + return true + } + } + return false +} + +func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string { + if query.AliasBy == "" { + return defaultMetricName + } + + result := legendKeyFormat.ReplaceAllFunc([]byte(query.AliasBy), func(in 
[]byte) []byte { + metaPartName := strings.Replace(string(in), "{{", "", 1) + metaPartName = strings.Replace(metaPartName, "}}", "", 1) + metaPartName = strings.TrimSpace(metaPartName) + + if metaPartName == "metric.type" { + return []byte(metricType) + } + + metricPart := replaceWithMetricPart(metaPartName, metricType) + + if metricPart != nil { + return metricPart + } + + metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1) + + if val, exists := metricLabels[metaPartName]; exists { + return []byte(val) + } + + metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1) + + if val, exists := resourceLabels[metaPartName]; exists { + return []byte(val) + } + + return in + }) + + return string(result) +} + +func replaceWithMetricPart(metaPartName string, metricType string) []byte { + // https://cloud.google.com/monitoring/api/v3/metrics-details#label_names + shortMatches := metricNameFormat.FindStringSubmatch(metricType) + + if metaPartName == "metric.name" { + if len(shortMatches) > 0 { + return []byte(shortMatches[2]) + } + } + + if metaPartName == "metric.service" { + if len(shortMatches) > 0 { + return []byte(shortMatches[1]) + } + } + + return nil +} + +func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { + u, _ := url.Parse(dsInfo.Url) + u.Path = path.Join(u.Path, "render") + + req, err := http.NewRequest(http.MethodGet, "https://monitoring.googleapis.com/", nil) + if err != nil { + slog.Error("Failed to create request", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) + + // find plugin + plugin, ok := plugins.DataSources[dsInfo.Type] + if !ok { + return nil, errors.New("Unable to find datasource plugin Stackdriver") + } + projectName := dsInfo.JsonData.Get("defaultProject").MustString() + proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries") + + var stackdriverRoute *plugins.AppPluginRoute + for _, route := range plugin.Routes { + if route.Path == "stackdriver" { + stackdriverRoute = route + break + } + } + + pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo) + + return req, nil +} diff --git a/pkg/tsdb/stackdriver/stackdriver_test.go b/pkg/tsdb/stackdriver/stackdriver_test.go new file mode 100644 index 00000000000..da4d6890207 --- /dev/null +++ b/pkg/tsdb/stackdriver/stackdriver_test.go @@ -0,0 +1,357 @@ +package stackdriver + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestStackdriver(t *testing.T) { + Convey("Stackdriver", t, func() { + executor := &StackdriverExecutor{} + + Convey("Parse queries from frontend and build Stackdriver API queries", func() { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + tsdbQuery := &tsdb.TsdbQuery{ + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "view": "FULL", + "aliasBy": "testalias", + "type": "timeSeriesQuery", + }), + RefId: "A", + }, + }, + } + + Convey("and query has no aggregation set", func() { + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 7) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + So(queries[0].AliasBy, ShouldEqual, "testalias") + }) + + Convey("and query has filters", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := 
executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(len(queries), ShouldEqual, 1) + So(queries[0].Params["filter"][0], ShouldEqual, `metric.type="a/metric/type" key="value" key2="value2"`) + }) + + Convey("and alignmentPeriod is set to grafana-auto", func() { + Convey("and IntervalMs is larger than 60000", func() { + tsdbQuery.Queries[0].IntervalMs = 1000000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+1000s`) + }) + Convey("and IntervalMs is less than 60000", func() { + tsdbQuery.Queries[0].IntervalMs = 30000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + }) + + Convey("and alignmentPeriod is set to stackdriver-auto", func() { + Convey("and range is two hours", func() { + tsdbQuery.TimeRange.From = "1538033322461" + tsdbQuery.TimeRange.To = "1538040522461" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + + Convey("and range is 22 hours", func() { + tsdbQuery.TimeRange.From = "1538034524922" + tsdbQuery.TimeRange.To = "1538113724922" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := 
executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + + Convey("and range is 23 hours", func() { + tsdbQuery.TimeRange.From = "1538034567985" + tsdbQuery.TimeRange.To = "1538117367985" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`) + }) + + Convey("and range is 7 days", func() { + tsdbQuery.TimeRange.From = "1538036324073" + tsdbQuery.TimeRange.To = "1538641124073" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) + }) + }) + + Convey("and alignmentPeriod is set in frontend", func() { + Convey("and alignment period is too big", func() { + tsdbQuery.Queries[0].IntervalMs = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+360000s", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) + }) + + Convey("and alignment period is within accepted range", func() { + tsdbQuery.Queries[0].IntervalMs = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+600s", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+600s`) + }) + }) + + Convey("and query has aggregation mean set", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": 
"a/metric/type", + "primaryAggregation": "REDUCE_MEAN", + "view": "FULL", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_MEAN&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 7) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.crossSeriesReducer"][0], ShouldEqual, "REDUCE_MEAN") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, "+60s") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + }) + + Convey("and query has group bys", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "primaryAggregation": "REDUCE_NONE", + "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, + "view": "FULL", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + 
So(len(queries[0].Params), ShouldEqual, 8) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1") + So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + }) + + }) + + Convey("Parse stackdriver response in the time series format", func() { + Convey("when data from query aggregated to one time series", func() { + data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 1) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(len(res.Series), ShouldEqual, 1) + So(res.Series[0].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count") + So(len(res.Series[0].Points), ShouldEqual, 3) + + Convey("timestamps should be in ascending order", func() { + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 0.05) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536670020000) + + So(res.Series[0].Points[1][0].Float64, ShouldEqual, 1.05) + So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536670080000) + + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 1.0666666666667) + So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536670260000) + }) + }) + + Convey("when data from query with no aggregation", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: 
simplejson.New(), RefId: "A"} + query := &StackdriverQuery{} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should add labels to metric name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1") + }) + + Convey("Should parse to time series", func() { + So(len(res.Series[0].Points), ShouldEqual, 3) + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 9.8566497180145) + So(res.Series[0].Points[1][0].Float64, ShouldEqual, 9.7323568146676) + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 9.7730520330369) + }) + + Convey("Should add meta for labels to the response", func() { + metricLabels := res.Meta.Get("metricLabels").Interface().(map[string][]string) + So(metricLabels, ShouldNotBeNil) + So(len(metricLabels["instance_name"]), ShouldEqual, 3) + So(metricLabels["instance_name"][0], ShouldEqual, "collector-asia-east-1") + So(metricLabels["instance_name"][1], ShouldEqual, "collector-europe-west-1") + So(metricLabels["instance_name"][2], ShouldEqual, "collector-us-east-1") + + resourceLabels := res.Meta.Get("resourceLabels").Interface().(map[string][]string) + So(resourceLabels, ShouldNotBeNil) + So(len(resourceLabels["zone"]), ShouldEqual, 3) + So(resourceLabels["zone"][0], ShouldEqual, "asia-east1-a") + So(resourceLabels["zone"][1], ShouldEqual, "europe-west1-b") + So(resourceLabels["zone"][2], ShouldEqual, "us-east1-b") + + So(len(resourceLabels["project_id"]), ShouldEqual, 1) + So(resourceLabels["project_id"][0], ShouldEqual, "grafana-prod") + }) + }) + + Convey("when data from query with no aggregation and group bys", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + 
So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should add instance name and zone labels to metric name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b") + }) + }) + + Convey("when data from query with no aggregation and alias by", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + + Convey("and the alias pattern is for metric type, a metric label and a resource label", func() { + + query := &StackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should use alias by formatting and only show instance name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b") + }) + }) + + Convey("and the alias pattern is for 
metric name", func() { + + query := &StackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should use alias by formatting and only show instance name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") + So(res.Series[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") + So(res.Series[2].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") + }) + }) + }) + }) + }) +} + +func loadTestFile(path string) (StackdriverResponse, error) { + var data StackdriverResponse + + jsonBody, err := ioutil.ReadFile(path) + if err != nil { + return data, err + } + err = json.Unmarshal(jsonBody, &data) + return data, err +} diff --git a/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json new file mode 100644 index 00000000000..e1a84583cc4 --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json @@ -0,0 +1,46 @@ +{ + "timeSeries": [ + { + "metric": { + "type": "serviceruntime.googleapis.com\/api\/request_count" + }, + "resource": { + "type": "consumed_api", + "labels": { + "project_id": "grafana-prod" + } + }, + "metricKind": "GAUGE", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:51:00Z", + "endTime": "2018-09-11T12:51:00Z" + }, + "value": { + "doubleValue": 1.0666666666667 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:48:00Z", + "endTime": "2018-09-11T12:48:00Z" + }, + "value": { + "doubleValue": 1.05 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:47:00Z", + "endTime": "2018-09-11T12:47:00Z" + }, + "value": { + "doubleValue": 0.05 + } + } + ] + } + ] +} diff --git 
a/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json new file mode 100644 index 00000000000..da615a168bf --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json @@ -0,0 +1,145 @@ +{ + "timeSeries": [ + { + "metric": { + "labels": { + "instance_name": "collector-asia-east-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "1119268429530133111", + "zone": "asia-east1-a", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 9.7730520330369 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 9.7323568146676 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 9.8566497180145 + } + } + ] + }, + { + "metric": { + "labels": { + "instance_name": "collector-europe-west-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "22241654114540837222", + "zone": "europe-west1-b", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 8.8210971239023 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 8.9689492364414 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 9.0238475054502 + } + } + ] + }, + { 
+ "metric": { + "labels": { + "instance_name": "collector-us-east-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "3332264424035095333", + "zone": "us-east1-b", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 30.807846801355 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 30.903974115849 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 30.829426143318 + } + } + ] + } + ] +} diff --git a/pkg/tsdb/stackdriver/types.go b/pkg/tsdb/stackdriver/types.go new file mode 100644 index 00000000000..c58ac2968f2 --- /dev/null +++ b/pkg/tsdb/stackdriver/types.go @@ -0,0 +1,43 @@ +package stackdriver + +import ( + "net/url" + "time" +) + +// StackdriverQuery is the query that Grafana sends from the frontend +type StackdriverQuery struct { + Target string + Params url.Values + RefID string + GroupBys []string + AliasBy string +} + +// StackdriverResponse is the data returned from the external Google Stackdriver API +type StackdriverResponse struct { + TimeSeries []struct { + Metric struct { + Labels map[string]string `json:"labels"` + Type string `json:"type"` + } `json:"metric"` + Resource struct { + Type string `json:"type"` + Labels map[string]string `json:"labels"` + } `json:"resource"` + MetricKind string `json:"metricKind"` + ValueType string `json:"valueType"` + Points []struct { + Interval struct { + StartTime time.Time `json:"startTime"` + EndTime time.Time `json:"endTime"` + } `json:"interval"` + Value struct { + DoubleValue float64 `json:"doubleValue"` + StringValue string `json:"stringValue"` + BoolValue bool 
`json:"boolValue"` + IntValue string `json:"int64Value"` + } `json:"value"` + } `json:"points"` + } `json:"timeSeries"` +} diff --git a/pkg/tsdb/testdata/scenarios.go b/pkg/tsdb/testdata/scenarios.go index e907fa8aae0..421a907b5e9 100644 --- a/pkg/tsdb/testdata/scenarios.go +++ b/pkg/tsdb/testdata/scenarios.go @@ -95,27 +95,20 @@ func init() { Id: "random_walk", Name: "Random Walk", - Handler: func(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { - timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() - to := tsdbQuery.TimeRange.GetToAsMsEpoch() + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + return getRandomWalk(query, context) + }, + }) - series := newSeriesForQuery(query) - - points := make(tsdb.TimeSeriesPoints, 0) - walker := rand.Float64() * 100 - - for i := int64(0); i < 10000 && timeWalkerMs < to; i++ { - points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs))) - - walker += rand.Float64() - 0.5 - timeWalkerMs += query.IntervalMs - } - - series.Points = points - - queryRes := tsdb.NewQueryResult() - queryRes.Series = append(queryRes.Series, series) - return queryRes + registerScenario(&Scenario{ + Id: "slow_query", + Name: "Slow Query", + StringInput: "5s", + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + stringInput := query.Model.Get("stringInput").MustString() + parsedInterval, _ := time.ParseDuration(stringInput) + time.Sleep(parsedInterval) + return getRandomWalk(query, context) }, }) @@ -221,6 +214,57 @@ func init() { return queryRes }, }) + + registerScenario(&Scenario{ + Id: "table_static", + Name: "Table Static", + + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() + to := context.TimeRange.GetToAsMsEpoch() + + table := tsdb.Table{ + Columns: []tsdb.TableColumn{ + {Text: "Time"}, + {Text: "Message"}, + {Text: "Description"}, + {Text: "Value"}, + }, + 
Rows: []tsdb.RowValues{}, + } + for i := int64(0); i < 10 && timeWalkerMs < to; i++ { + table.Rows = append(table.Rows, tsdb.RowValues{float64(timeWalkerMs), "This is a message", "Description", 23.1}) + timeWalkerMs += query.IntervalMs + } + + queryRes := tsdb.NewQueryResult() + queryRes.Tables = append(queryRes.Tables, &table) + return queryRes + }, + }) +} + +func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() + to := tsdbQuery.TimeRange.GetToAsMsEpoch() + + series := newSeriesForQuery(query) + + points := make(tsdb.TimeSeriesPoints, 0) + walker := rand.Float64() * 100 + + for i := int64(0); i < 10000 && timeWalkerMs < to; i++ { + points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs))) + + walker += rand.Float64() - 0.5 + timeWalkerMs += query.IntervalMs + } + + series.Points = points + + queryRes := tsdb.NewQueryResult() + queryRes.Series = append(queryRes.Series, series) + return queryRes } func registerScenario(scenario *Scenario) { diff --git a/pkg/util/md5_test.go b/pkg/util/md5_test.go index 1338d42bb51..43c685b8763 100644 --- a/pkg/util/md5_test.go +++ b/pkg/util/md5_test.go @@ -3,14 +3,14 @@ package util import "testing" func TestMd5Sum(t *testing.T) { - input := "dont hash passwords with md5" + input := "don't hash passwords with md5" have, err := Md5SumString(input) if err != nil { t.Fatal("expected err to be nil") } - want := "2d6a56c82d09d374643b926d3417afba" + want := "dd1f7fdb3466c0d09c2e839d1f1530f8" if have != want { t.Fatalf("expected: %s got: %s", want, have) } diff --git a/public/app/containers/Explore/Wrapper.tsx b/public/app/containers/Explore/Wrapper.tsx deleted file mode 100644 index 6bdbd7cc42f..00000000000 --- a/public/app/containers/Explore/Wrapper.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import React, { PureComponent } from 'react'; - -import Explore from './Explore'; - -export default class Wrapper extends 
PureComponent { - state = { - initialState: null, - split: false, - }; - - handleChangeSplit = (split, initialState) => { - this.setState({ split, initialState }); - }; - - render() { - // State overrides for props from first Explore - const { initialState, split } = this.state; - return ( -
- - {split ? ( - - ) : null} -
- ); - } -} diff --git a/public/app/core/components/LayoutSelector/LayoutSelector.tsx b/public/app/core/components/LayoutSelector/LayoutSelector.tsx new file mode 100644 index 00000000000..d9e00102438 --- /dev/null +++ b/public/app/core/components/LayoutSelector/LayoutSelector.tsx @@ -0,0 +1,39 @@ +import React, { SFC } from 'react'; + +export type LayoutMode = LayoutModes.Grid | LayoutModes.List; + +export enum LayoutModes { + Grid = 'grid', + List = 'list', +} + +interface Props { + mode: LayoutMode; + onLayoutModeChanged: (mode: LayoutMode) => {}; +} + +const LayoutSelector: SFC = props => { + const { mode, onLayoutModeChanged } = props; + return ( +
+ + +
+ ); +}; + +export default LayoutSelector; diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx new file mode 100644 index 00000000000..d838a2d5c34 --- /dev/null +++ b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx @@ -0,0 +1,23 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import OrgActionBar, { Props } from './OrgActionBar'; + +const setup = (propOverrides?: object) => { + const props: Props = { + searchQuery: '', + setSearchQuery: jest.fn(), + linkButton: { href: 'some/url', title: 'test' }, + }; + + Object.assign(props, propOverrides); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.tsx new file mode 100644 index 00000000000..9596a27acad --- /dev/null +++ b/public/app/core/components/OrgActionBar/OrgActionBar.tsx @@ -0,0 +1,38 @@ +import React, { PureComponent } from 'react'; +import LayoutSelector, { LayoutMode } from '../LayoutSelector/LayoutSelector'; + +export interface Props { + searchQuery: string; + layoutMode?: LayoutMode; + onSetLayoutMode?: (mode: LayoutMode) => {}; + setSearchQuery: (value: string) => {}; + linkButton: { href: string; title: string }; +} + +export default class OrgActionBar extends PureComponent { + render() { + const { searchQuery, layoutMode, onSetLayoutMode, linkButton, setSearchQuery } = this.props; + + return ( +
+
+ + onSetLayoutMode(mode)} /> +
+ + ); + } +} diff --git a/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap new file mode 100644 index 00000000000..dc53e7863ea --- /dev/null +++ b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap @@ -0,0 +1,39 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
+
+ + +
+ +`; diff --git a/public/app/core/components/PermissionList/AddPermission.tsx b/public/app/core/components/PermissionList/AddPermission.tsx index 77ac6953b74..fc062ce63e4 100644 --- a/public/app/core/components/PermissionList/AddPermission.tsx +++ b/public/app/core/components/PermissionList/AddPermission.tsx @@ -1,7 +1,8 @@ import React, { Component } from 'react'; -import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { UserPicker } from 'app/core/components/Picker/UserPicker'; import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; +import { User } from 'app/types'; import { dashboardPermissionLevels, dashboardAclTargets, diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx index e50513c44e1..8f48ba8f66a 100644 --- a/public/app/core/components/Picker/UserPicker.tsx +++ b/public/app/core/components/Picker/UserPicker.tsx @@ -3,6 +3,7 @@ import Select from 'react-select'; import PickerOption from './PickerOption'; import { debounce } from 'lodash'; import { getBackendSrv } from 'app/core/services/backend_srv'; +import { User } from 'app/types'; export interface Props { onSelected: (user: User) => void; @@ -14,13 +15,6 @@ export interface State { isLoading: boolean; } -export interface User { - id: number; - label: string; - avatarUrl: string; - login: string; -} - export class UserPicker extends Component { debouncedSearch: any; diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index 29e0c32d4d7..b1d7ec6fb9e 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -248,6 +248,9 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop return; } + // ensure dropdown menu doesn't impact on z-index + 
body.find('.dropdown-menu-open').removeClass('dropdown-menu-open'); + // for stuff that animates, slides out etc, clicking it needs to // hide it right away const clickAutoHide = target.closest('[data-click-hide]'); diff --git a/public/app/core/components/search/search_results.html b/public/app/core/components/search/search_results.html index 7435f8d0b7e..45258ded652 100644 --- a/public/app/core/components/search/search_results.html +++ b/public/app/core/components/search/search_results.html @@ -34,6 +34,7 @@
{{::item.title}}
+ {{::item.folderTitle}}
diff --git a/public/app/core/directives/metric_segment.ts b/public/app/core/directives/metric_segment.ts index 7759e14f2cc..de904e95fc6 100644 --- a/public/app/core/directives/metric_segment.ts +++ b/public/app/core/directives/metric_segment.ts @@ -118,6 +118,9 @@ export function metricSegment($compile, $sce) { }; $scope.matcher = function(item) { + if (linkMode) { + return false; + } let str = this.query; if (str[0] === '/') { str = str.substring(1); diff --git a/public/app/core/reducers/location.ts b/public/app/core/reducers/location.ts index 6a356c4ea5a..2089cfe9f59 100644 --- a/public/app/core/reducers/location.ts +++ b/public/app/core/reducers/location.ts @@ -1,6 +1,6 @@ import { Action } from 'app/core/actions/location'; -import { LocationState, UrlQueryMap } from 'app/types'; -import { toUrlParams } from 'app/core/utils/url'; +import { LocationState } from 'app/types'; +import { renderUrl } from 'app/core/utils/url'; export const initialState: LocationState = { url: '', @@ -9,13 +9,6 @@ export const initialState: LocationState = { routeParams: {}, }; -function renderUrl(path: string, query: UrlQueryMap | undefined): string { - if (query && Object.keys(query).length > 0) { - path += '?' 
+ toUrlParams(query); - } - return path; -} - export const locationReducer = (state = initialState, action: Action): LocationState => { switch (action.type) { case 'UPDATE_LOCATION': { diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index d05e9b0c21c..d8dfc958dd4 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -4,7 +4,7 @@ import _ from 'lodash'; import config from 'app/core/config'; import coreModule from 'app/core/core_module'; import appEvents from 'app/core/app_events'; -import { encodePathComponent } from 'app/core/utils/location_util'; +import { getExploreUrl } from 'app/core/utils/explore'; import Mousetrap from 'mousetrap'; import 'mousetrap-global-bind'; @@ -15,7 +15,14 @@ export class KeybindingSrv { timepickerOpen = false; /** @ngInject */ - constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) { + constructor( + private $rootScope, + private $location, + private $timeout, + private datasourceSrv, + private timeSrv, + private contextSrv + ) { // clear out all shortcuts on route change $rootScope.$on('$routeChangeSuccess', () => { Mousetrap.reset(); @@ -194,14 +201,9 @@ export class KeybindingSrv { if (dashboard.meta.focusPanelId) { const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); const datasource = await this.datasourceSrv.get(panel.datasource); - if (datasource && datasource.supportsExplore) { - const range = this.timeSrv.timeRangeForUrl(); - const state = { - ...datasource.getExploreState(panel), - range, - }; - const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore?state=${exploreState}`); + const url = await getExploreUrl(panel, panel.targets, datasource, this.datasourceSrv, this.timeSrv); + if (url) { + this.$timeout(() => this.$location.url(url)); } } }); diff --git a/public/app/core/utils/explore.test.ts 
b/public/app/core/utils/explore.test.ts new file mode 100644 index 00000000000..13b863994bf --- /dev/null +++ b/public/app/core/utils/explore.test.ts @@ -0,0 +1,95 @@ +import { DEFAULT_RANGE, serializeStateToUrlParam, parseUrlState } from './explore'; +import { ExploreState } from 'app/types/explore'; + +const DEFAULT_EXPLORE_STATE: ExploreState = { + datasource: null, + datasourceError: null, + datasourceLoading: null, + datasourceMissing: false, + datasourceName: '', + graphResult: null, + history: [], + latency: 0, + loading: false, + logsResult: null, + queries: [], + queryErrors: [], + queryHints: [], + range: DEFAULT_RANGE, + requestOptions: null, + showingGraph: true, + showingLogs: true, + showingTable: true, + supportsGraph: null, + supportsLogs: null, + supportsTable: null, + tableResult: null, +}; + +describe('state functions', () => { + describe('parseUrlState', () => { + it('returns default state on empty string', () => { + expect(parseUrlState('')).toMatchObject({ + datasource: null, + queries: [], + range: DEFAULT_RANGE, + }); + }); + }); + describe('serializeStateToUrlParam', () => { + it('returns url parameter value for a state object', () => { + const state = { + ...DEFAULT_EXPLORE_STATE, + datasourceName: 'foo', + range: { + from: 'now - 5h', + to: 'now', + }, + queries: [ + { + query: 'metric{test="a/b"}', + }, + { + query: 'super{foo="x/z"}', + }, + ], + }; + expect(serializeStateToUrlParam(state)).toBe( + '{"datasource":"foo","queries":[{"query":"metric{test=\\"a/b\\"}"},' + + '{"query":"super{foo=\\"x/z\\"}"}],"range":{"from":"now - 5h","to":"now"}}' + ); + }); + }); + describe('interplay', () => { + it('can parse the serialized state into the original state', () => { + const state = { + ...DEFAULT_EXPLORE_STATE, + datasourceName: 'foo', + range: { + from: 'now - 5h', + to: 'now', + }, + queries: [ + { + query: 'metric{test="a/b"}', + }, + { + query: 'super{foo="x/z"}', + }, + ], + }; + const serialized = serializeStateToUrlParam(state); + 
const parsed = parseUrlState(serialized); + + // Account for datasource vs datasourceName + const { datasource, ...rest } = parsed; + const sameState = { + ...rest, + datasource: DEFAULT_EXPLORE_STATE.datasource, + datasourceName: datasource, + }; + + expect(state).toMatchObject(sameState); + }); + }); +}); diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts new file mode 100644 index 00000000000..cca841a1725 --- /dev/null +++ b/public/app/core/utils/explore.ts @@ -0,0 +1,78 @@ +import { renderUrl } from 'app/core/utils/url'; +import { ExploreState, ExploreUrlState } from 'app/types/explore'; + +export const DEFAULT_RANGE = { + from: 'now-6h', + to: 'now', +}; + +/** + * Returns an Explore-URL that contains a panel's queries and the dashboard time range. + * + * @param panel Origin panel of the jump to Explore + * @param panelTargets The origin panel's query targets + * @param panelDatasource The origin panel's datasource + * @param datasourceSrv Datasource service to query other datasources in case the panel datasource is mixed + * @param timeSrv Time service to get the current dashboard range from + */ +export async function getExploreUrl( + panel: any, + panelTargets: any[], + panelDatasource: any, + datasourceSrv: any, + timeSrv: any +) { + let exploreDatasource = panelDatasource; + let exploreTargets = panelTargets; + let url; + + // Mixed datasources need to choose only one datasource + if (panelDatasource.meta.id === 'mixed' && panelTargets) { + // Find first explore datasource among targets + let mixedExploreDatasource; + for (const t of panel.targets) { + const datasource = await datasourceSrv.get(t.datasource); + if (datasource && datasource.meta.explore) { + mixedExploreDatasource = datasource; + break; + } + } + + // Add all its targets + if (mixedExploreDatasource) { + exploreDatasource = mixedExploreDatasource; + exploreTargets = panelTargets.filter(t => t.datasource === mixedExploreDatasource.name); + } + } + + if 
(exploreDatasource && exploreDatasource.meta.explore) { + const range = timeSrv.timeRangeForUrl(); + const state = { + ...exploreDatasource.getExploreState(exploreTargets), + range, + }; + const exploreState = JSON.stringify(state); + url = renderUrl('/explore', { state: exploreState }); + } + return url; +} + +export function parseUrlState(initial: string | undefined): ExploreUrlState { + if (initial) { + try { + return JSON.parse(decodeURI(initial)); + } catch (e) { + console.error(e); + } + } + return { datasource: null, queries: [], range: DEFAULT_RANGE }; +} + +export function serializeStateToUrlParam(state: ExploreState): string { + const urlState: ExploreUrlState = { + datasource: state.datasourceName, + queries: state.queries.map(q => ({ query: q.query })), + range: state.range, + }; + return JSON.stringify(urlState); +} diff --git a/public/app/core/utils/location_util.ts b/public/app/core/utils/location_util.ts index 735272285ff..76f2fc5881f 100644 --- a/public/app/core/utils/location_util.ts +++ b/public/app/core/utils/location_util.ts @@ -1,10 +1,5 @@ import config from 'app/core/config'; -// Slash encoding for angular location provider, see https://github.com/angular/angular.js/issues/10479 -const SLASH = ''; -export const decodePathComponent = (pc: string) => decodeURIComponent(pc).replace(new RegExp(SLASH, 'g'), '/'); -export const encodePathComponent = (pc: string) => encodeURIComponent(pc.replace(/\//g, SLASH)); - export const stripBaseFromUrl = url => { const appSubUrl = config.appSubUrl; const stripExtraChars = appSubUrl.endsWith('/') ? 
1 : 0; diff --git a/public/app/core/utils/url.ts b/public/app/core/utils/url.ts index 198029b0e9f..ab8be8ad222 100644 --- a/public/app/core/utils/url.ts +++ b/public/app/core/utils/url.ts @@ -2,6 +2,15 @@ * @preserve jquery-param (c) 2015 KNOWLEDGECODE | MIT */ +import { UrlQueryMap } from 'app/types'; + +export function renderUrl(path: string, query: UrlQueryMap | undefined): string { + if (query && Object.keys(query).length > 0) { + path += '?' + toUrlParams(query); + } + return path; +} + export function toUrlParams(a) { const s = []; const rbracket = /\[\]$/; diff --git a/public/app/features/api-keys/ApiKeysAddedModal.test.tsx b/public/app/features/api-keys/ApiKeysAddedModal.test.tsx new file mode 100644 index 00000000000..160418a7ab8 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysAddedModal.test.tsx @@ -0,0 +1,25 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { ApiKeysAddedModal, Props } from './ApiKeysAddedModal'; + +const setup = (propOverrides?: object) => { + const props: Props = { + apiKey: 'api key test', + rootPath: 'test/path', + }; + + Object.assign(props, propOverrides); + + const wrapper = shallow(); + + return { + wrapper, + }; +}; + +describe('Render', () => { + it('should render component', () => { + const { wrapper } = setup(); + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/api-keys/ApiKeysAddedModal.tsx b/public/app/features/api-keys/ApiKeysAddedModal.tsx new file mode 100644 index 00000000000..995aa46c773 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysAddedModal.tsx @@ -0,0 +1,46 @@ +import React from 'react'; + +export interface Props { + apiKey: string; + rootPath: string; +} + +export const ApiKeysAddedModal = (props: Props) => { + return ( +
+
+

+ + API Key Created +

+ + + + +
+ +
+
+
+ Key + {props.apiKey} +
+
+ +
+ You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now. +
+
+ You can authenticate request using the Authorization HTTP header, example: +
+
+
+            curl -H "Authorization: Bearer {props.apiKey}" {props.rootPath}/api/dashboards/home
+          
+
+
+
+ ); +}; + +export default ApiKeysAddedModal; diff --git a/public/app/features/api-keys/ApiKeysPage.test.tsx b/public/app/features/api-keys/ApiKeysPage.test.tsx new file mode 100644 index 00000000000..518180fc424 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysPage.test.tsx @@ -0,0 +1,73 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { Props, ApiKeysPage } from './ApiKeysPage'; +import { NavModel, ApiKey } from 'app/types'; +import { getMultipleMockKeys, getMockKey } from './__mocks__/apiKeysMock'; + +const setup = (propOverrides?: object) => { + const props: Props = { + navModel: {} as NavModel, + apiKeys: [] as ApiKey[], + searchQuery: '', + loadApiKeys: jest.fn(), + deleteApiKey: jest.fn(), + setSearchQuery: jest.fn(), + addApiKey: jest.fn(), + }; + + Object.assign(props, propOverrides); + + const wrapper = shallow(); + const instance = wrapper.instance() as ApiKeysPage; + + return { + wrapper, + instance, + }; +}; + +describe('Render', () => { + it('should render component', () => { + const { wrapper } = setup(); + expect(wrapper).toMatchSnapshot(); + }); + + it('should render API keys table', () => { + const { wrapper } = setup({ + apiKeys: getMultipleMockKeys(5), + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); + +describe('Life cycle', () => { + it('should call loadApiKeys', () => { + const { instance } = setup(); + + instance.componentDidMount(); + + expect(instance.props.loadApiKeys).toHaveBeenCalled(); + }); +}); + +describe('Functions', () => { + describe('Delete team', () => { + it('should call delete team', () => { + const { instance } = setup(); + instance.onDeleteApiKey(getMockKey()); + expect(instance.props.deleteApiKey).toHaveBeenCalledWith(1); + }); + }); + + describe('on search query change', () => { + it('should call setSearchQuery', () => { + const { instance } = setup(); + const mockEvent = { target: { value: 'test' } }; + + instance.onSearchQueryChange(mockEvent); + + 
expect(instance.props.setSearchQuery).toHaveBeenCalledWith('test'); + }); + }); +}); diff --git a/public/app/features/api-keys/ApiKeysPage.tsx b/public/app/features/api-keys/ApiKeysPage.tsx new file mode 100644 index 00000000000..2f19250e835 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysPage.tsx @@ -0,0 +1,222 @@ +import React, { PureComponent } from 'react'; +import ReactDOMServer from 'react-dom/server'; +import { connect } from 'react-redux'; +import { hot } from 'react-hot-loader'; +import { NavModel, ApiKey, NewApiKey, OrgRole } from 'app/types'; +import { getNavModel } from 'app/core/selectors/navModel'; +import { getApiKeys } from './state/selectors'; +import { loadApiKeys, deleteApiKey, setSearchQuery, addApiKey } from './state/actions'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import ApiKeysAddedModal from './ApiKeysAddedModal'; +import config from 'app/core/config'; +import appEvents from 'app/core/app_events'; + +export interface Props { + navModel: NavModel; + apiKeys: ApiKey[]; + searchQuery: string; + loadApiKeys: typeof loadApiKeys; + deleteApiKey: typeof deleteApiKey; + setSearchQuery: typeof setSearchQuery; + addApiKey: typeof addApiKey; +} + +export interface State { + isAdding: boolean; + newApiKey: NewApiKey; +} + +enum ApiKeyStateProps { + Name = 'name', + Role = 'role', +} + +const initialApiKeyState = { + name: '', + role: OrgRole.Viewer, +}; + +export class ApiKeysPage extends PureComponent { + constructor(props) { + super(props); + this.state = { isAdding: false, newApiKey: initialApiKeyState }; + } + + componentDidMount() { + this.fetchApiKeys(); + } + + async fetchApiKeys() { + await this.props.loadApiKeys(); + } + + onDeleteApiKey(key: ApiKey) { + this.props.deleteApiKey(key.id); + } + + onSearchQueryChange = evt => { + this.props.setSearchQuery(evt.target.value); + }; + + onToggleAdding = () => { + this.setState({ isAdding: 
!this.state.isAdding }); + }; + + onAddApiKey = async evt => { + evt.preventDefault(); + + const openModal = (apiKey: string) => { + const rootPath = window.location.origin + config.appSubUrl; + const modalTemplate = ReactDOMServer.renderToString(); + + appEvents.emit('show-modal', { + templateHtml: modalTemplate, + }); + }; + + this.props.addApiKey(this.state.newApiKey, openModal); + this.setState((prevState: State) => { + return { + ...prevState, + newApiKey: initialApiKeyState, + }; + }); + }; + + onApiKeyStateUpdate = (evt, prop: string) => { + const value = evt.currentTarget.value; + this.setState((prevState: State) => { + const newApiKey = { + ...prevState.newApiKey, + }; + newApiKey[prop] = value; + + return { + ...prevState, + newApiKey: newApiKey, + }; + }); + }; + + render() { + const { newApiKey, isAdding } = this.state; + const { navModel, apiKeys, searchQuery } = this.props; + + return ( +
+ +
+
+
+ +
+ +
+ +
+ + +
+ +
Add API Key
+
+
+
+ Key name + this.onApiKeyStateUpdate(evt, ApiKeyStateProps.Name)} + /> +
+
+ Role + + + +
+
+ +
+
+
+
+
+ +

Existing Keys

+ + + + + + + + {apiKeys.length > 0 ? ( + + {apiKeys.map(key => { + return ( + + + + + + ); + })} + + ) : null} +
NameRole +
{key.name}{key.role} + this.onDeleteApiKey(key)} className="btn btn-danger btn-mini"> + + +
+
+
+ ); + } +} + +function mapStateToProps(state) { + return { + navModel: getNavModel(state.navIndex, 'apikeys'), + apiKeys: getApiKeys(state.apiKeys), + searchQuery: state.apiKeys.searchQuery, + }; +} + +const mapDispatchToProps = { + loadApiKeys, + deleteApiKey, + setSearchQuery, + addApiKey, +}; + +export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(ApiKeysPage)); diff --git a/public/app/features/api-keys/__mocks__/apiKeysMock.ts b/public/app/features/api-keys/__mocks__/apiKeysMock.ts new file mode 100644 index 00000000000..117f0d6d0c6 --- /dev/null +++ b/public/app/features/api-keys/__mocks__/apiKeysMock.ts @@ -0,0 +1,22 @@ +import { ApiKey, OrgRole } from 'app/types'; + +export const getMultipleMockKeys = (numberOfKeys: number): ApiKey[] => { + const keys: ApiKey[] = []; + for (let i = 1; i <= numberOfKeys; i++) { + keys.push({ + id: i, + name: `test-${i}`, + role: OrgRole.Viewer, + }); + } + + return keys; +}; + +export const getMockKey = (): ApiKey => { + return { + id: 1, + name: 'test', + role: OrgRole.Admin, + }; +}; diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap new file mode 100644 index 00000000000..0fcb13308eb --- /dev/null +++ b/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap @@ -0,0 +1,78 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
+
+

+ + + API Key Created + +

+ + + +
+
+
+
+ + Key + + + api key test + +
+
+
+ You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now. +
+
+ You can authenticate request using the Authorization HTTP header, example: +
+
+
+        curl -H "Authorization: Bearer 
+        api key test
+        " 
+        test/path
+        /api/dashboards/home
+      
+
+
+
+`; diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap new file mode 100644 index 00000000000..77c7f620173 --- /dev/null +++ b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap @@ -0,0 +1,435 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render API keys table 1`] = ` +
+ +
+
+
+ +
+
+ +
+ +
+ +
+ Add API Key +
+
+
+
+ + Key name + + +
+
+ + Role + + + + +
+
+ +
+
+
+
+
+

+ Existing Keys +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Name + + Role + +
+ test-1 + + Viewer + + + + +
+ test-2 + + Viewer + + + + +
+ test-3 + + Viewer + + + + +
+ test-4 + + Viewer + + + + +
+ test-5 + + Viewer + + + + +
+
+
+`; + +exports[`Render should render component 1`] = ` +
+ +
+
+
+ +
+
+ +
+ +
+ +
+ Add API Key +
+
+
+
+ + Key name + + +
+
+ + Role + + + + +
+
+ +
+
+
+
+
+

+ Existing Keys +

+ + + + + + + +
+ Name + + Role + +
+
+
+`; diff --git a/public/app/features/api-keys/state/actions.ts b/public/app/features/api-keys/state/actions.ts new file mode 100644 index 00000000000..63e91088476 --- /dev/null +++ b/public/app/features/api-keys/state/actions.ts @@ -0,0 +1,56 @@ +import { ThunkAction } from 'redux-thunk'; +import { getBackendSrv } from 'app/core/services/backend_srv'; +import { StoreState, ApiKey } from 'app/types'; + +export enum ActionTypes { + LoadApiKeys = 'LOAD_API_KEYS', + SetApiKeysSearchQuery = 'SET_API_KEYS_SEARCH_QUERY', +} + +export interface LoadApiKeysAction { + type: ActionTypes.LoadApiKeys; + payload: ApiKey[]; +} + +export interface SetSearchQueryAction { + type: ActionTypes.SetApiKeysSearchQuery; + payload: string; +} + +export type Action = LoadApiKeysAction | SetSearchQueryAction; + +type ThunkResult = ThunkAction; + +const apiKeysLoaded = (apiKeys: ApiKey[]): LoadApiKeysAction => ({ + type: ActionTypes.LoadApiKeys, + payload: apiKeys, +}); + +export function addApiKey(apiKey: ApiKey, openModal: (key: string) => void): ThunkResult { + return async dispatch => { + const result = await getBackendSrv().post('/api/auth/keys', apiKey); + dispatch(setSearchQuery('')); + dispatch(loadApiKeys()); + openModal(result.key); + }; +} + +export function loadApiKeys(): ThunkResult { + return async dispatch => { + const response = await getBackendSrv().get('/api/auth/keys'); + dispatch(apiKeysLoaded(response)); + }; +} + +export function deleteApiKey(id: number): ThunkResult { + return async dispatch => { + getBackendSrv() + .delete('/api/auth/keys/' + id) + .then(dispatch(loadApiKeys())); + }; +} + +export const setSearchQuery = (searchQuery: string): SetSearchQueryAction => ({ + type: ActionTypes.SetApiKeysSearchQuery, + payload: searchQuery, +}); diff --git a/public/app/features/api-keys/state/reducers.test.ts b/public/app/features/api-keys/state/reducers.test.ts new file mode 100644 index 00000000000..3b2c831a5a3 --- /dev/null +++ 
b/public/app/features/api-keys/state/reducers.test.ts @@ -0,0 +1,31 @@ +import { Action, ActionTypes } from './actions'; +import { initialApiKeysState, apiKeysReducer } from './reducers'; +import { getMultipleMockKeys } from '../__mocks__/apiKeysMock'; + +describe('API Keys reducer', () => { + it('should set keys', () => { + const payload = getMultipleMockKeys(4); + + const action: Action = { + type: ActionTypes.LoadApiKeys, + payload, + }; + + const result = apiKeysReducer(initialApiKeysState, action); + + expect(result.keys).toEqual(payload); + }); + + it('should set search query', () => { + const payload = 'test query'; + + const action: Action = { + type: ActionTypes.SetApiKeysSearchQuery, + payload, + }; + + const result = apiKeysReducer(initialApiKeysState, action); + + expect(result.searchQuery).toEqual('test query'); + }); +}); diff --git a/public/app/features/api-keys/state/reducers.ts b/public/app/features/api-keys/state/reducers.ts new file mode 100644 index 00000000000..a21aa55dbf7 --- /dev/null +++ b/public/app/features/api-keys/state/reducers.ts @@ -0,0 +1,21 @@ +import { ApiKeysState } from 'app/types'; +import { Action, ActionTypes } from './actions'; + +export const initialApiKeysState: ApiKeysState = { + keys: [], + searchQuery: '', +}; + +export const apiKeysReducer = (state = initialApiKeysState, action: Action): ApiKeysState => { + switch (action.type) { + case ActionTypes.LoadApiKeys: + return { ...state, keys: action.payload }; + case ActionTypes.SetApiKeysSearchQuery: + return { ...state, searchQuery: action.payload }; + } + return state; +}; + +export default { + apiKeys: apiKeysReducer, +}; diff --git a/public/app/features/api-keys/state/selectors.test.ts b/public/app/features/api-keys/state/selectors.test.ts new file mode 100644 index 00000000000..7d8f3122ce6 --- /dev/null +++ b/public/app/features/api-keys/state/selectors.test.ts @@ -0,0 +1,25 @@ +import { getApiKeys } from './selectors'; +import { getMultipleMockKeys } from 
'../__mocks__/apiKeysMock'; +import { ApiKeysState } from 'app/types'; + +describe('API Keys selectors', () => { + describe('Get API Keys', () => { + const mockKeys = getMultipleMockKeys(5); + + it('should return all keys if no search query', () => { + const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '' }; + + const keys = getApiKeys(mockState); + + expect(keys).toEqual(mockKeys); + }); + + it('should filter keys if search query exists', () => { + const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '5' }; + + const keys = getApiKeys(mockState); + + expect(keys.length).toEqual(1); + }); + }); +}); diff --git a/public/app/features/api-keys/state/selectors.ts b/public/app/features/api-keys/state/selectors.ts new file mode 100644 index 00000000000..8065c252e85 --- /dev/null +++ b/public/app/features/api-keys/state/selectors.ts @@ -0,0 +1,9 @@ +import { ApiKeysState } from 'app/types'; + +export const getApiKeys = (state: ApiKeysState) => { + const regex = RegExp(state.searchQuery, 'i'); + + return state.keys.filter(key => { + return regex.test(key.name) || regex.test(key.role); + }); +}; diff --git a/public/app/features/dashboard/specs/time_srv.test.ts b/public/app/features/dashboard/specs/time_srv.test.ts index 514e0b90792..db0d11f2ebe 100644 --- a/public/app/features/dashboard/specs/time_srv.test.ts +++ b/public/app/features/dashboard/specs/time_srv.test.ts @@ -29,6 +29,7 @@ describe('timeSrv', () => { beforeEach(() => { timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); timeSrv.init(_dashboard); + _dashboard.refresh = false; }); describe('timeRange', () => { @@ -79,6 +80,23 @@ describe('timeSrv', () => { expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime()); }); + it('should ignore refresh if time absolute', () => { + location = { + search: jest.fn(() => ({ + from: '20140410T052010', + to: '20140520T031022', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), 
location, timer, { isGrafanaVisibile: jest.fn() }); + + // dashboard saved with refresh on + _dashboard.refresh = true; + timeSrv.init(_dashboard); + + expect(timeSrv.refresh).toBe(false); + }); + it('should handle formatted dates without time', () => { location = { search: jest.fn(() => ({ diff --git a/public/app/features/dashboard/state/actions.ts b/public/app/features/dashboard/state/actions.ts index 82333817b2b..9e923f6bcb7 100644 --- a/public/app/features/dashboard/state/actions.ts +++ b/public/app/features/dashboard/state/actions.ts @@ -58,7 +58,7 @@ export function updateDashboardPermission( continue; } - const updated = toUpdateItem(itemToUpdate); + const updated = toUpdateItem(item); // if this is the item we want to update, update it's permisssion if (itemToUpdate === item) { diff --git a/public/app/features/dashboard/submenu/submenu.html b/public/app/features/dashboard/submenu/submenu.html index f240a86efba..d7cee33e6c3 100644 --- a/public/app/features/dashboard/submenu/submenu.html +++ b/public/app/features/dashboard/submenu/submenu.html @@ -4,7 +4,8 @@ - + +
diff --git a/public/app/features/dashboard/time_srv.ts b/public/app/features/dashboard/time_srv.ts index 5f5b1b02fe2..89aa94ed336 100644 --- a/public/app/features/dashboard/time_srv.ts +++ b/public/app/features/dashboard/time_srv.ts @@ -84,6 +84,12 @@ export class TimeSrv { if (params.to) { this.time.to = this.parseUrlParam(params.to) || this.time.to; } + // if absolute ignore refresh option saved to dashboard + if (params.to && params.to.indexOf('now') === -1) { + this.refresh = false; + this.dashboard.refresh = false; + } + // but if refresh explicitly set then use that if (params.refresh) { this.refresh = params.refresh || this.refresh; } @@ -106,7 +112,7 @@ export class TimeSrv { } private timeHasChangedSinceLoad() { - return this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to; + return this.timeAtLoad && (this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to); } setAutoRefresh(interval) { diff --git a/public/app/features/dashboard/upload.ts b/public/app/features/dashboard/upload.ts index 974a0c35cd2..42871327eb6 100644 --- a/public/app/features/dashboard/upload.ts +++ b/public/app/features/dashboard/upload.ts @@ -1,10 +1,12 @@ import coreModule from 'app/core/core_module'; +import appEvents from 'app/core/app_events'; +import angular from 'angular'; const template = ` - + `; @@ -15,8 +17,11 @@ function uploadDashboardDirective(timer, alertSrv, $location) { template: template, scope: { onUpload: '&', + btnText: '@?', }, - link: scope => { + link: (scope, elem) => { + scope.btnText = angular.isDefined(scope.btnText) ? 
scope.btnText : 'Upload .json File'; + function file_selected(evt) { const files = evt.target.files; // FileList object const readerOnload = () => { @@ -26,7 +31,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) { dash = JSON.parse(e.target.result); } catch (err) { console.log(err); - scope.appEvent('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]); + appEvents.emit('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]); return; } @@ -52,7 +57,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) { // Check for the various File API support. if (wnd.File && wnd.FileReader && wnd.FileList && wnd.Blob) { // Something - document.getElementById('dashupload').addEventListener('change', file_selected, false); + elem[0].addEventListener('change', file_selected, false); } else { alertSrv.set('Oops', 'Sorry, the HTML5 File APIs are not fully supported in this browser.', 'error'); } diff --git a/public/app/features/datasources/DataSourceList.test.tsx b/public/app/features/datasources/DataSourceList.test.tsx new file mode 100644 index 00000000000..6e097da2c53 --- /dev/null +++ b/public/app/features/datasources/DataSourceList.test.tsx @@ -0,0 +1,22 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import DataSourcesList from './DataSourcesList'; +import { getMockDataSources } from './__mocks__/dataSourcesMocks'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +const setup = () => { + const props = { + dataSources: getMockDataSources(3), + layoutMode: LayoutModes.Grid, + }; + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/datasources/DataSourcesList.tsx b/public/app/features/datasources/DataSourcesList.tsx new file mode 100644 index 00000000000..904ed0cf679 --- /dev/null +++ 
b/public/app/features/datasources/DataSourcesList.tsx @@ -0,0 +1,34 @@ +import React, { PureComponent } from 'react'; +import classNames from 'classnames/bind'; +import DataSourcesListItem from './DataSourcesListItem'; +import { DataSource } from 'app/types'; +import { LayoutMode, LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +export interface Props { + dataSources: DataSource[]; + layoutMode: LayoutMode; +} + +export class DataSourcesList extends PureComponent { + render() { + const { dataSources, layoutMode } = this.props; + + const listStyle = classNames({ + 'card-section': true, + 'card-list-layout-grid': layoutMode === LayoutModes.Grid, + 'card-list-layout-list': layoutMode === LayoutModes.List, + }); + + return ( +
+
    + {dataSources.map((dataSource, index) => { + return ; + })} +
+
+ ); + } +} + +export default DataSourcesList; diff --git a/public/app/features/datasources/DataSourcesListItem.test.tsx b/public/app/features/datasources/DataSourcesListItem.test.tsx new file mode 100644 index 00000000000..138c71cb46a --- /dev/null +++ b/public/app/features/datasources/DataSourcesListItem.test.tsx @@ -0,0 +1,20 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import DataSourcesListItem from './DataSourcesListItem'; +import { getMockDataSource } from './__mocks__/dataSourcesMocks'; + +const setup = () => { + const props = { + dataSource: getMockDataSource(), + }; + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/datasources/DataSourcesListItem.tsx b/public/app/features/datasources/DataSourcesListItem.tsx new file mode 100644 index 00000000000..a4fedb893fb --- /dev/null +++ b/public/app/features/datasources/DataSourcesListItem.tsx @@ -0,0 +1,35 @@ +import React, { PureComponent } from 'react'; +import { DataSource } from 'app/types'; + +export interface Props { + dataSource: DataSource; +} + +export class DataSourcesListItem extends PureComponent { + render() { + const { dataSource } = this.props; + return ( +
  • + +
    +
    {dataSource.type}
    +
    +
    +
    + +
    +
    +
    + {dataSource.name} + {dataSource.isDefault && default} +
    +
    {dataSource.url}
    +
    +
    +
    +
  • + ); + } +} + +export default DataSourcesListItem; diff --git a/public/app/features/datasources/DataSourcesListPage.test.tsx b/public/app/features/datasources/DataSourcesListPage.test.tsx new file mode 100644 index 00000000000..96f6c304b16 --- /dev/null +++ b/public/app/features/datasources/DataSourcesListPage.test.tsx @@ -0,0 +1,40 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { DataSourcesListPage, Props } from './DataSourcesListPage'; +import { DataSource, NavModel } from 'app/types'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; +import { getMockDataSources } from './__mocks__/dataSourcesMocks'; + +const setup = (propOverrides?: object) => { + const props: Props = { + dataSources: [] as DataSource[], + layoutMode: LayoutModes.Grid, + loadDataSources: jest.fn(), + navModel: {} as NavModel, + dataSourcesCount: 0, + searchQuery: '', + setDataSourcesSearchQuery: jest.fn(), + setDataSourcesLayoutMode: jest.fn(), + }; + + Object.assign(props, propOverrides); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); + + it('should render action bar and datasources', () => { + const wrapper = setup({ + dataSources: getMockDataSources(5), + dataSourcesCount: 5, + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/datasources/DataSourcesListPage.tsx b/public/app/features/datasources/DataSourcesListPage.tsx new file mode 100644 index 00000000000..a5887973a6b --- /dev/null +++ b/public/app/features/datasources/DataSourcesListPage.tsx @@ -0,0 +1,107 @@ +import React, { PureComponent } from 'react'; +import { connect } from 'react-redux'; +import { hot } from 'react-hot-loader'; +import PageHeader from '../../core/components/PageHeader/PageHeader'; +import OrgActionBar from '../../core/components/OrgActionBar/OrgActionBar'; +import EmptyListCTA from 
'../../core/components/EmptyListCTA/EmptyListCTA'; +import DataSourcesList from './DataSourcesList'; +import { DataSource, NavModel } from 'app/types'; +import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector'; +import { loadDataSources, setDataSourcesLayoutMode, setDataSourcesSearchQuery } from './state/actions'; +import { getNavModel } from '../../core/selectors/navModel'; +import { + getDataSources, + getDataSourcesCount, + getDataSourcesLayoutMode, + getDataSourcesSearchQuery, +} from './state/selectors'; + +export interface Props { + navModel: NavModel; + dataSources: DataSource[]; + dataSourcesCount: number; + layoutMode: LayoutMode; + searchQuery: string; + loadDataSources: typeof loadDataSources; + setDataSourcesLayoutMode: typeof setDataSourcesLayoutMode; + setDataSourcesSearchQuery: typeof setDataSourcesSearchQuery; +} + +const emptyListModel = { + title: 'There are no data sources defined yet', + buttonIcon: 'gicon gicon-add-datasources', + buttonLink: 'datasources/new', + buttonTitle: 'Add data source', + proTip: 'You can also define data sources through configuration files.', + proTipLink: 'http://docs.grafana.org/administration/provisioning/#datasources?utm_source=grafana_ds_list', + proTipLinkTitle: 'Learn more', + proTipTarget: '_blank', +}; + +export class DataSourcesListPage extends PureComponent { + componentDidMount() { + this.fetchDataSources(); + } + + async fetchDataSources() { + return await this.props.loadDataSources(); + } + + render() { + const { + dataSources, + dataSourcesCount, + navModel, + layoutMode, + searchQuery, + setDataSourcesSearchQuery, + setDataSourcesLayoutMode, + } = this.props; + + const linkButton = { + href: 'datasources/new', + title: 'Add data source', + }; + + return ( +
    + +
    + {dataSourcesCount === 0 ? ( + + ) : ( + [ + setDataSourcesLayoutMode(mode)} + setSearchQuery={query => setDataSourcesSearchQuery(query)} + linkButton={linkButton} + key="action-bar" + />, + , + ] + )} +
    +
    + ); + } +} + +function mapStateToProps(state) { + return { + navModel: getNavModel(state.navIndex, 'datasources'), + dataSources: getDataSources(state.dataSources), + layoutMode: getDataSourcesLayoutMode(state.dataSources), + dataSourcesCount: getDataSourcesCount(state.dataSources), + searchQuery: getDataSourcesSearchQuery(state.dataSources), + }; +} + +const mapDispatchToProps = { + loadDataSources, + setDataSourcesSearchQuery, + setDataSourcesLayoutMode, +}; + +export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(DataSourcesListPage)); diff --git a/public/app/features/datasources/__mocks__/dataSourcesMocks.ts b/public/app/features/datasources/__mocks__/dataSourcesMocks.ts new file mode 100644 index 00000000000..97819a18c82 --- /dev/null +++ b/public/app/features/datasources/__mocks__/dataSourcesMocks.ts @@ -0,0 +1,45 @@ +import { DataSource } from 'app/types'; + +export const getMockDataSources = (amount: number): DataSource[] => { + const dataSources = []; + + for (let i = 0; i <= amount; i++) { + dataSources.push({ + access: '', + basicAuth: false, + database: `database-${i}`, + id: i, + isDefault: false, + jsonData: { authType: 'credentials', defaultRegion: 'eu-west-2' }, + name: `dataSource-${i}`, + orgId: 1, + password: '', + readOnly: false, + type: 'cloudwatch', + typeLogoUrl: 'public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png', + url: '', + user: '', + }); + } + + return dataSources; +}; + +export const getMockDataSource = (): DataSource => { + return { + access: '', + basicAuth: false, + database: '', + id: 13, + isDefault: false, + jsonData: { authType: 'credentials', defaultRegion: 'eu-west-2' }, + name: 'gdev-cloudwatch', + orgId: 1, + password: '', + readOnly: false, + type: 'cloudwatch', + typeLogoUrl: 'public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png', + url: '', + user: '', + }; +}; diff --git a/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap 
b/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap new file mode 100644 index 00000000000..7167f59b048 --- /dev/null +++ b/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap @@ -0,0 +1,108 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
    +
      + + + + +
    +
    +`; diff --git a/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap new file mode 100644 index 00000000000..a424276cf32 --- /dev/null +++ b/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap @@ -0,0 +1,45 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
  • + +
    +
    + cloudwatch +
    +
    +
  • +`; diff --git a/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap new file mode 100644 index 00000000000..3f9dbab72ab --- /dev/null +++ b/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap @@ -0,0 +1,174 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render action bar and datasources 1`] = ` +
    + +
    + + +
    +
    +`; + +exports[`Render should render component 1`] = ` +
    + +
    + +
    +
    +`; diff --git a/public/app/features/datasources/state/actions.ts b/public/app/features/datasources/state/actions.ts new file mode 100644 index 00000000000..297797f2e59 --- /dev/null +++ b/public/app/features/datasources/state/actions.ts @@ -0,0 +1,51 @@ +import { ThunkAction } from 'redux-thunk'; +import { DataSource, StoreState } from 'app/types'; +import { getBackendSrv } from '../../../core/services/backend_srv'; +import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector'; + +export enum ActionTypes { + LoadDataSources = 'LOAD_DATA_SOURCES', + SetDataSourcesSearchQuery = 'SET_DATA_SOURCES_SEARCH_QUERY', + SetDataSourcesLayoutMode = 'SET_DATA_SOURCES_LAYOUT_MODE', +} + +export interface LoadDataSourcesAction { + type: ActionTypes.LoadDataSources; + payload: DataSource[]; +} + +export interface SetDataSourcesSearchQueryAction { + type: ActionTypes.SetDataSourcesSearchQuery; + payload: string; +} + +export interface SetDataSourcesLayoutModeAction { + type: ActionTypes.SetDataSourcesLayoutMode; + payload: LayoutMode; +} + +const dataSourcesLoaded = (dataSources: DataSource[]): LoadDataSourcesAction => ({ + type: ActionTypes.LoadDataSources, + payload: dataSources, +}); + +export const setDataSourcesSearchQuery = (searchQuery: string): SetDataSourcesSearchQueryAction => ({ + type: ActionTypes.SetDataSourcesSearchQuery, + payload: searchQuery, +}); + +export const setDataSourcesLayoutMode = (layoutMode: LayoutMode): SetDataSourcesLayoutModeAction => ({ + type: ActionTypes.SetDataSourcesLayoutMode, + payload: layoutMode, +}); + +export type Action = LoadDataSourcesAction | SetDataSourcesSearchQueryAction | SetDataSourcesLayoutModeAction; + +type ThunkResult = ThunkAction; + +export function loadDataSources(): ThunkResult { + return async dispatch => { + const response = await getBackendSrv().get('/api/datasources'); + dispatch(dataSourcesLoaded(response)); + }; +} diff --git a/public/app/features/datasources/state/reducers.ts 
b/public/app/features/datasources/state/reducers.ts new file mode 100644 index 00000000000..d57b0ad523a --- /dev/null +++ b/public/app/features/datasources/state/reducers.ts @@ -0,0 +1,29 @@ +import { DataSource, DataSourcesState } from 'app/types'; +import { Action, ActionTypes } from './actions'; +import { LayoutModes } from '../../../core/components/LayoutSelector/LayoutSelector'; + +const initialState: DataSourcesState = { + dataSources: [] as DataSource[], + layoutMode: LayoutModes.Grid, + searchQuery: '', + dataSourcesCount: 0, +}; + +export const dataSourcesReducer = (state = initialState, action: Action): DataSourcesState => { + switch (action.type) { + case ActionTypes.LoadDataSources: + return { ...state, dataSources: action.payload, dataSourcesCount: action.payload.length }; + + case ActionTypes.SetDataSourcesSearchQuery: + return { ...state, searchQuery: action.payload }; + + case ActionTypes.SetDataSourcesLayoutMode: + return { ...state, layoutMode: action.payload }; + } + + return state; +}; + +export default { + dataSources: dataSourcesReducer, +}; diff --git a/public/app/features/datasources/state/selectors.ts b/public/app/features/datasources/state/selectors.ts new file mode 100644 index 00000000000..6df08f68037 --- /dev/null +++ b/public/app/features/datasources/state/selectors.ts @@ -0,0 +1,11 @@ +export const getDataSources = state => { + const regex = new RegExp(state.searchQuery, 'i'); + + return state.dataSources.filter(dataSource => { + return regex.test(dataSource.name) || regex.test(dataSource.database); + }); +}; + +export const getDataSourcesSearchQuery = state => state.searchQuery; +export const getDataSourcesLayoutMode = state => state.layoutMode; +export const getDataSourcesCount = state => state.dataSourcesCount; diff --git a/public/app/containers/Explore/ElapsedTime.tsx b/public/app/features/explore/ElapsedTime.tsx similarity index 100% rename from public/app/containers/Explore/ElapsedTime.tsx rename to 
public/app/features/explore/ElapsedTime.tsx diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/features/explore/Explore.tsx similarity index 86% rename from public/app/containers/Explore/Explore.tsx rename to public/app/features/explore/Explore.tsx index 16175747a06..502ad65b353 100644 --- a/public/app/containers/Explore/Explore.tsx +++ b/public/app/features/explore/Explore.tsx @@ -2,19 +2,20 @@ import React from 'react'; import { hot } from 'react-hot-loader'; import Select from 'react-select'; +import { ExploreState, ExploreUrlState } from 'app/types/explore'; import kbn from 'app/core/utils/kbn'; import colors from 'app/core/utils/colors'; import store from 'app/core/store'; import TimeSeries from 'app/core/time_series2'; -import { decodePathComponent } from 'app/core/utils/location_util'; import { parse as parseDate } from 'app/core/utils/datemath'; +import { DEFAULT_RANGE } from 'app/core/utils/explore'; import ElapsedTime from './ElapsedTime'; import QueryRows from './QueryRows'; import Graph from './Graph'; import Logs from './Logs'; import Table from './Table'; -import TimePicker, { DEFAULT_RANGE } from './TimePicker'; +import TimePicker from './TimePicker'; import { ensureQueries, generateQueryKey, hasQuery } from './utils/query'; const MAX_HISTORY_ITEMS = 100; @@ -47,61 +48,32 @@ function makeTimeSeriesList(dataList, options) { }); } -function parseUrlState(initial: string | undefined) { - if (initial) { - try { - const parsed = JSON.parse(decodePathComponent(initial)); - return { - datasource: parsed.datasource, - queries: parsed.queries.map(q => q.query), - range: parsed.range, - }; - } catch (e) { - console.error(e); - } - } - return { datasource: null, queries: [], range: DEFAULT_RANGE }; +interface ExploreProps { + datasourceSrv: any; + onChangeSplit: (split: boolean, state?: ExploreState) => void; + onSaveState: (key: string, state: ExploreState) => void; + position: string; + split: boolean; + splitState?: ExploreState; + stateKey: 
string; + urlState: ExploreUrlState; } -interface ExploreState { - datasource: any; - datasourceError: any; - datasourceLoading: boolean | null; - datasourceMissing: boolean; - graphResult: any; - history: any[]; - initialDatasource?: string; - latency: number; - loading: any; - logsResult: any; - queries: any[]; - queryErrors: any[]; - queryHints: any[]; - range: any; - requestOptions: any; - showingGraph: boolean; - showingLogs: boolean; - showingTable: boolean; - supportsGraph: boolean | null; - supportsLogs: boolean | null; - supportsTable: boolean | null; - tableResult: any; -} - -export class Explore extends React.Component { +export class Explore extends React.PureComponent { el: any; constructor(props) { super(props); - const initialState: ExploreState = props.initialState; - const { datasource, queries, range } = parseUrlState(props.routeParams.state); + // Split state overrides everything + const splitState: ExploreState = props.splitState; + const { datasource, queries, range } = props.urlState; this.state = { datasource: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, + datasourceName: datasource, graphResult: null, - initialDatasource: datasource, history: [], latency: 0, loading: false, @@ -118,13 +90,13 @@ export class Explore extends React.Component { supportsLogs: null, supportsTable: null, tableResult: null, - ...initialState, + ...splitState, }; } async componentDidMount() { const { datasourceSrv } = this.props; - const { initialDatasource } = this.state; + const { datasourceName } = this.state; if (!datasourceSrv) { throw new Error('No datasource service passed as props.'); } @@ -133,15 +105,15 @@ export class Explore extends React.Component { this.setState({ datasourceLoading: true }); // Priority: datasource in url, default datasource, first explore datasource let datasource; - if (initialDatasource) { - datasource = await datasourceSrv.get(initialDatasource); + if (datasourceName) { + datasource = await 
datasourceSrv.get(datasourceName); } else { datasource = await datasourceSrv.get(); } if (!datasource.meta.explore) { datasource = await datasourceSrv.get(datasources[0].name); } - this.setDatasource(datasource); + await this.setDatasource(datasource); } else { this.setState({ datasourceMissing: true }); } @@ -188,9 +160,14 @@ export class Explore extends React.Component { supportsLogs, supportsTable, datasourceLoading: false, + datasourceName: datasource.name, queries: nextQueries, }, - () => datasourceError === null && this.onSubmit() + () => { + if (datasourceError === null) { + this.onSubmit(); + } + } ); } @@ -220,7 +197,8 @@ export class Explore extends React.Component { queryHints: [], tableResult: null, }); - const datasource = await this.props.datasourceSrv.get(option.value); + const datasourceName = option.value; + const datasource = await this.props.datasourceSrv.get(datasourceName); this.setDatasource(datasource); }; @@ -259,21 +237,25 @@ export class Explore extends React.Component { }; onClickClear = () => { - this.setState({ - graphResult: null, - logsResult: null, - latency: 0, - queries: ensureQueries(), - queryErrors: [], - queryHints: [], - tableResult: null, - }); + this.setState( + { + graphResult: null, + logsResult: null, + latency: 0, + queries: ensureQueries(), + queryErrors: [], + queryHints: [], + tableResult: null, + }, + this.saveState + ); }; onClickCloseSplit = () => { const { onChangeSplit } = this.props; if (onChangeSplit) { onChangeSplit(false); + this.saveState(); } }; @@ -291,6 +273,7 @@ export class Explore extends React.Component { state.queries = state.queries.map(({ edited, ...rest }) => rest); if (onChangeSplit) { onChangeSplit(true, state); + this.saveState(); } }; @@ -349,6 +332,7 @@ export class Explore extends React.Component { if (showingLogs && supportsLogs) { this.runLogsQuery(); } + this.saveState(); }; onQuerySuccess(datasourceId: string, queries: any[]): void { @@ -471,6 +455,11 @@ export class Explore extends 
React.Component { return datasource.metadataRequest(url); }; + saveState = () => { + const { stateKey, onSaveState } = this.props; + onSaveState(stateKey, this.state); + }; + render() { const { datasourceSrv, position, split } = this.props; const { @@ -528,10 +517,11 @@ export class Explore extends React.Component { {!datasourceMissing ? (
    -
    -
    - Role - - - -
    -
    - -
    -
    - - -

    Existing Keys

    - - - - - - - - - - - - - - - -
    NameRole
    {{t.name}}{{t.role}} - - - -
    -
    - - - diff --git a/public/app/features/org/partials/orgUsers.html b/public/app/features/org/partials/orgUsers.html deleted file mode 100644 index 697879c6ac2..00000000000 --- a/public/app/features/org/partials/orgUsers.html +++ /dev/null @@ -1,96 +0,0 @@ - - -
    -
    - - -
    - - - - - - Invite - - - - - {{ctrl.externalUserMngLinkName}} - -
    - -
    - -
    - -
    - - - - - - - - - - - - - - - - - - - -
    LoginEmail - Seen - Time since user was seen using Grafana - Role
    - - {{user.login}}{{user.email}}{{user.lastSeenAtAge}} -
    - -
    -
    - - - -
    -
    - -
    - - - - - - - - - - - - - - - -
    EmailName
    {{invite.email}}{{invite.name}} - -   - - -
    -
    -
    - diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index c7468216881..f7a3e22a134 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -6,7 +6,7 @@ import kbn from 'app/core/utils/kbn'; import { PanelCtrl } from 'app/features/panel/panel_ctrl'; import * as rangeUtil from 'app/core/utils/rangeutil'; import * as dateMath from 'app/core/utils/datemath'; -import { encodePathComponent } from 'app/core/utils/location_util'; +import { getExploreUrl } from 'app/core/utils/explore'; class MetricsPanelCtrl extends PanelCtrl { scope: any; @@ -308,7 +308,12 @@ class MetricsPanelCtrl extends PanelCtrl { getAdditionalMenuItems() { const items = []; - if (config.exploreEnabled && this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) { + if ( + config.exploreEnabled && + this.contextSrv.isEditor && + this.datasource && + (this.datasource.meta.explore || this.datasource.meta.id === 'mixed') + ) { items.push({ text: 'Explore', click: 'ctrl.explore();', @@ -319,14 +324,11 @@ class MetricsPanelCtrl extends PanelCtrl { return items; } - explore() { - const range = this.timeSrv.timeRangeForUrl(); - const state = { - ...this.datasource.getExploreState(this.panel), - range, - }; - const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore?state=${exploreState}`); + async explore() { + const url = await getExploreUrl(this.panel, this.panel.targets, this.datasource, this.datasourceSrv, this.timeSrv); + if (url) { + this.$timeout(() => this.$location.url(url)); + } } addQuery(target) { diff --git a/public/app/features/panel/specs/metrics_panel_ctrl.test.ts b/public/app/features/panel/specs/metrics_panel_ctrl.test.ts index a28bf92e63b..913a2461fd0 100644 --- a/public/app/features/panel/specs/metrics_panel_ctrl.test.ts +++ b/public/app/features/panel/specs/metrics_panel_ctrl.test.ts @@ -38,7 +38,7 @@ 
describe('MetricsPanelCtrl', () => { describe('and has datasource set that supports explore and user has powers', () => { beforeEach(() => { ctrl.contextSrv = { isEditor: true }; - ctrl.datasource = { supportsExplore: true }; + ctrl.datasource = { meta: { explore: true } }; additionalItems = ctrl.getAdditionalMenuItems(); }); diff --git a/public/app/features/plugins/PluginList.test.tsx b/public/app/features/plugins/PluginList.test.tsx new file mode 100644 index 00000000000..201dd69b9db --- /dev/null +++ b/public/app/features/plugins/PluginList.test.tsx @@ -0,0 +1,25 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import PluginList from './PluginList'; +import { getMockPlugins } from './__mocks__/pluginMocks'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +const setup = (propOverrides?: object) => { + const props = Object.assign( + { + plugins: getMockPlugins(5), + layoutMode: LayoutModes.Grid, + }, + propOverrides + ); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/plugins/PluginList.tsx b/public/app/features/plugins/PluginList.tsx new file mode 100644 index 00000000000..0074839e754 --- /dev/null +++ b/public/app/features/plugins/PluginList.tsx @@ -0,0 +1,32 @@ +import React, { SFC } from 'react'; +import classNames from 'classnames/bind'; +import PluginListItem from './PluginListItem'; +import { Plugin } from 'app/types'; +import { LayoutMode, LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +interface Props { + plugins: Plugin[]; + layoutMode: LayoutMode; +} + +const PluginList: SFC = props => { + const { plugins, layoutMode } = props; + + const listStyle = classNames({ + 'card-section': true, + 'card-list-layout-grid': layoutMode === LayoutModes.Grid, + 'card-list-layout-list': layoutMode === LayoutModes.List, + }); + + 
return ( +
    +
      + {plugins.map((plugin, index) => { + return ; + })} +
    +
    + ); +}; + +export default PluginList; diff --git a/public/app/features/plugins/PluginListItem.test.tsx b/public/app/features/plugins/PluginListItem.test.tsx new file mode 100644 index 00000000000..175911c5e05 --- /dev/null +++ b/public/app/features/plugins/PluginListItem.test.tsx @@ -0,0 +1,33 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import PluginListItem from './PluginListItem'; +import { getMockPlugin } from './__mocks__/pluginMocks'; + +const setup = (propOverrides?: object) => { + const props = Object.assign( + { + plugin: getMockPlugin(), + }, + propOverrides + ); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); + + it('should render has plugin section', () => { + const mockPlugin = getMockPlugin(); + mockPlugin.hasUpdate = true; + const wrapper = setup({ + plugin: mockPlugin, + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/plugins/PluginListItem.tsx b/public/app/features/plugins/PluginListItem.tsx new file mode 100644 index 00000000000..05eac614fd5 --- /dev/null +++ b/public/app/features/plugins/PluginListItem.tsx @@ -0,0 +1,39 @@ +import React, { SFC } from 'react'; +import { Plugin } from 'app/types'; + +interface Props { + plugin: Plugin; +} + +const PluginListItem: SFC = props => { + const { plugin } = props; + + return ( +
  • + +
    +
    + + {plugin.type} +
    + {plugin.hasUpdate && ( +
    + Update available! +
    + )} +
    +
    +
    + +
    +
    +
    {plugin.name}
    +
    {`By ${plugin.info.author.name}`}
    +
    +
    +
    +
  • + ); +}; + +export default PluginListItem; diff --git a/public/app/features/plugins/PluginListPage.test.tsx b/public/app/features/plugins/PluginListPage.test.tsx new file mode 100644 index 00000000000..b173ef51a2a --- /dev/null +++ b/public/app/features/plugins/PluginListPage.test.tsx @@ -0,0 +1,35 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { PluginListPage, Props } from './PluginListPage'; +import { NavModel, Plugin } from '../../types'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +const setup = (propOverrides?: object) => { + const props: Props = { + navModel: {} as NavModel, + plugins: [] as Plugin[], + searchQuery: '', + setPluginsSearchQuery: jest.fn(), + setPluginsLayoutMode: jest.fn(), + layoutMode: LayoutModes.Grid, + loadPlugins: jest.fn(), + }; + + Object.assign(props, propOverrides); + + const wrapper = shallow(); + const instance = wrapper.instance() as PluginListPage; + + return { + wrapper, + instance, + }; +}; + +describe('Render', () => { + it('should render component', () => { + const { wrapper } = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/plugins/PluginListPage.tsx b/public/app/features/plugins/PluginListPage.tsx new file mode 100644 index 00000000000..d654ebd7cff --- /dev/null +++ b/public/app/features/plugins/PluginListPage.tsx @@ -0,0 +1,72 @@ +import React, { PureComponent } from 'react'; +import { hot } from 'react-hot-loader'; +import { connect } from 'react-redux'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import OrgActionBar from 'app/core/components/OrgActionBar/OrgActionBar'; +import PluginList from './PluginList'; +import { NavModel, Plugin } from 'app/types'; +import { loadPlugins, setPluginsLayoutMode, setPluginsSearchQuery } from './state/actions'; +import { getNavModel } from '../../core/selectors/navModel'; +import { getLayoutMode, getPlugins, getPluginsSearchQuery } from 
'./state/selectors'; +import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector'; + +export interface Props { + navModel: NavModel; + plugins: Plugin[]; + layoutMode: LayoutMode; + searchQuery: string; + loadPlugins: typeof loadPlugins; + setPluginsLayoutMode: typeof setPluginsLayoutMode; + setPluginsSearchQuery: typeof setPluginsSearchQuery; +} + +export class PluginListPage extends PureComponent { + componentDidMount() { + this.fetchPlugins(); + } + + async fetchPlugins() { + await this.props.loadPlugins(); + } + + render() { + const { navModel, plugins, layoutMode, setPluginsLayoutMode, setPluginsSearchQuery, searchQuery } = this.props; + + const linkButton = { + href: 'https://grafana.com/plugins?utm_source=grafana_plugin_list', + title: 'Find more plugins on Grafana.com', + }; + return ( +
    + +
    + setPluginsLayoutMode(mode)} + setSearchQuery={query => setPluginsSearchQuery(query)} + linkButton={linkButton} + /> + {plugins && } +
    +
    + ); + } +} + +function mapStateToProps(state) { + return { + navModel: getNavModel(state.navIndex, 'plugins'), + plugins: getPlugins(state.plugins), + layoutMode: getLayoutMode(state.plugins), + searchQuery: getPluginsSearchQuery(state.plugins), + }; +} + +const mapDispatchToProps = { + loadPlugins, + setPluginsLayoutMode, + setPluginsSearchQuery, +}; + +export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(PluginListPage)); diff --git a/public/app/features/plugins/__mocks__/pluginMocks.ts b/public/app/features/plugins/__mocks__/pluginMocks.ts new file mode 100644 index 00000000000..d8dd67d5b61 --- /dev/null +++ b/public/app/features/plugins/__mocks__/pluginMocks.ts @@ -0,0 +1,59 @@ +import { Plugin } from 'app/types'; + +export const getMockPlugins = (amount: number): Plugin[] => { + const plugins = []; + + for (let i = 0; i <= amount; i++) { + plugins.push({ + defaultNavUrl: 'some/url', + enabled: false, + hasUpdate: false, + id: `${i}`, + info: { + author: { + name: 'Grafana Labs', + url: 'url/to/GrafanaLabs', + }, + description: 'pretty decent plugin', + links: ['one link'], + logos: { small: 'small/logo', large: 'large/logo' }, + screenshots: `screenshot/${i}`, + updated: '2018-09-26', + version: '1', + }, + latestVersion: `1.${i}`, + name: `pretty cool plugin-${i}`, + pinned: false, + state: '', + type: '', + }); + } + + return plugins; +}; + +export const getMockPlugin = () => { + return { + defaultNavUrl: 'some/url', + enabled: false, + hasUpdate: false, + id: '1', + info: { + author: { + name: 'Grafana Labs', + url: 'url/to/GrafanaLabs', + }, + description: 'pretty decent plugin', + links: ['one link'], + logos: { small: 'small/logo', large: 'large/logo' }, + screenshots: 'screenshot/1', + updated: '2018-09-26', + version: '1', + }, + latestVersion: '1', + name: 'pretty cool plugin 1', + pinned: false, + state: '', + type: '', + }; +}; diff --git a/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap 
b/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap new file mode 100644 index 00000000000..176304b7b11 --- /dev/null +++ b/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap @@ -0,0 +1,210 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
    +
      + + + + + + +
    +
    +`; diff --git a/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap new file mode 100644 index 00000000000..fc0cc68c522 --- /dev/null +++ b/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap @@ -0,0 +1,106 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
  • + +
    +
    + +
    +
    +
    +
    + +
    +
    +
    + pretty cool plugin 1 +
    +
    + By Grafana Labs +
    +
    +
    +
    +
  • +`; + +exports[`Render should render has plugin section 1`] = ` +
  • + +
    +
    + +
    +
    + + Update available! + +
    +
    +
    +
    + +
    +
    +
    + pretty cool plugin 1 +
    +
    + By Grafana Labs +
    +
    +
    +
    +
  • +`; diff --git a/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap new file mode 100644 index 00000000000..43d9f45883d --- /dev/null +++ b/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap @@ -0,0 +1,29 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
    + +
    + + +
    +
    +`; diff --git a/public/app/features/plugins/all.ts b/public/app/features/plugins/all.ts index fd19ea963b6..d164a6d4255 100644 --- a/public/app/features/plugins/all.ts +++ b/public/app/features/plugins/all.ts @@ -1,9 +1,7 @@ import './plugin_edit_ctrl'; import './plugin_page_ctrl'; -import './plugin_list_ctrl'; import './import_list/import_list'; import './ds_edit_ctrl'; import './ds_dashboards_ctrl'; -import './ds_list_ctrl'; import './datasource_srv'; import './plugin_component'; diff --git a/public/app/features/plugins/built_in_plugins.ts b/public/app/features/plugins/built_in_plugins.ts index d5c05e3d0c3..b9779190a8b 100644 --- a/public/app/features/plugins/built_in_plugins.ts +++ b/public/app/features/plugins/built_in_plugins.ts @@ -11,6 +11,7 @@ import * as postgresPlugin from 'app/plugins/datasource/postgres/module'; import * as prometheusPlugin from 'app/plugins/datasource/prometheus/module'; import * as mssqlPlugin from 'app/plugins/datasource/mssql/module'; import * as testDataDSPlugin from 'app/plugins/datasource/testdata/module'; +import * as stackdriverPlugin from 'app/plugins/datasource/stackdriver/module'; import * as textPanel from 'app/plugins/panel/text/module'; import * as text2Panel from 'app/plugins/panel/text2/module'; @@ -38,6 +39,7 @@ const builtInPlugins = { 'app/plugins/datasource/mssql/module': mssqlPlugin, 'app/plugins/datasource/prometheus/module': prometheusPlugin, 'app/plugins/datasource/testdata/module': testDataDSPlugin, + 'app/plugins/datasource/stackdriver/module': stackdriverPlugin, 'app/plugins/panel/text/module': textPanel, 'app/plugins/panel/text2/module': text2Panel, diff --git a/public/app/features/plugins/ds_list_ctrl.ts b/public/app/features/plugins/ds_list_ctrl.ts deleted file mode 100644 index 71c1a516842..00000000000 --- a/public/app/features/plugins/ds_list_ctrl.ts +++ /dev/null @@ -1,61 +0,0 @@ -import coreModule from '../../core/core_module'; -import _ from 'lodash'; - -export class DataSourcesCtrl { - 
datasources: any; - unfiltered: any; - navModel: any; - searchQuery: string; - - /** @ngInject */ - constructor(private $scope, private backendSrv, private datasourceSrv, private navModelSrv) { - this.navModel = this.navModelSrv.getNav('cfg', 'datasources', 0); - backendSrv.get('/api/datasources').then(result => { - this.datasources = result; - this.unfiltered = result; - }); - } - - onQueryUpdated() { - const regex = new RegExp(this.searchQuery, 'ig'); - this.datasources = _.filter(this.unfiltered, item => { - regex.lastIndex = 0; - return regex.test(item.name) || regex.test(item.type); - }); - } - - removeDataSourceConfirmed(ds) { - this.backendSrv - .delete('/api/datasources/' + ds.id) - .then( - () => { - this.$scope.appEvent('alert-success', ['Datasource deleted', '']); - }, - () => { - this.$scope.appEvent('alert-error', ['Unable to delete datasource', '']); - } - ) - .then(() => { - this.backendSrv.get('/api/datasources').then(result => { - this.datasources = result; - }); - this.backendSrv.get('/api/frontend/settings').then(settings => { - this.datasourceSrv.init(settings.datasources); - }); - }); - } - - removeDataSource(ds) { - this.$scope.appEvent('confirm-modal', { - title: 'Delete', - text: 'Are you sure you want to delete datasource ' + ds.name + '?', - yesText: 'Delete', - icon: 'fa-trash', - onConfirm: () => { - this.removeDataSourceConfirmed(ds); - }, - }); - } -} - -coreModule.controller('DataSourcesCtrl', DataSourcesCtrl); diff --git a/public/app/features/plugins/partials/ds_edit.html b/public/app/features/plugins/partials/ds_edit.html index f0bb8867f83..41605a99d02 100644 --- a/public/app/features/plugins/partials/ds_edit.html +++ b/public/app/features/plugins/partials/ds_edit.html @@ -31,11 +31,16 @@
    -
    +
    This plugin is marked as being in alpha state, which means it is in early development phase and updates will include breaking changes.
    +
    + This plugin is marked as being in a beta development state. This means it is currently in active development and could be + missing important features. +
    + diff --git a/public/app/features/plugins/partials/ds_list.html b/public/app/features/plugins/partials/ds_list.html deleted file mode 100644 index fd537fc47d4..00000000000 --- a/public/app/features/plugins/partials/ds_list.html +++ /dev/null @@ -1,63 +0,0 @@ - - - diff --git a/public/app/features/plugins/partials/plugin_list.html b/public/app/features/plugins/partials/plugin_list.html deleted file mode 100644 index 04b5bf9c791..00000000000 --- a/public/app/features/plugins/partials/plugin_list.html +++ /dev/null @@ -1,45 +0,0 @@ - - - diff --git a/public/app/features/plugins/plugin_list_ctrl.ts b/public/app/features/plugins/plugin_list_ctrl.ts deleted file mode 100644 index 315252364cc..00000000000 --- a/public/app/features/plugins/plugin_list_ctrl.ts +++ /dev/null @@ -1,30 +0,0 @@ -import angular from 'angular'; -import _ from 'lodash'; - -export class PluginListCtrl { - plugins: any[]; - tabIndex: number; - navModel: any; - searchQuery: string; - allPlugins: any[]; - - /** @ngInject */ - constructor(private backendSrv: any, $location, navModelSrv) { - this.tabIndex = 0; - this.navModel = navModelSrv.getNav('cfg', 'plugins', 0); - - this.backendSrv.get('api/plugins', { embedded: 0 }).then(plugins => { - this.plugins = plugins; - this.allPlugins = plugins; - }); - } - - onQueryUpdated() { - const regex = new RegExp(this.searchQuery, 'ig'); - this.plugins = _.filter(this.allPlugins, item => { - return regex.test(item.name) || regex.test(item.type); - }); - } -} - -angular.module('grafana.controllers').controller('PluginListCtrl', PluginListCtrl); diff --git a/public/app/features/plugins/state/actions.ts b/public/app/features/plugins/state/actions.ts new file mode 100644 index 00000000000..dcfd510ffa0 --- /dev/null +++ b/public/app/features/plugins/state/actions.ts @@ -0,0 +1,51 @@ +import { Plugin, StoreState } from 'app/types'; +import { ThunkAction } from 'redux-thunk'; +import { getBackendSrv } from '../../../core/services/backend_srv'; +import { LayoutMode 
} from '../../../core/components/LayoutSelector/LayoutSelector'; + +export enum ActionTypes { + LoadPlugins = 'LOAD_PLUGINS', + SetPluginsSearchQuery = 'SET_PLUGIN_SEARCH_QUERY', + SetLayoutMode = 'SET_LAYOUT_MODE', +} + +export interface LoadPluginsAction { + type: ActionTypes.LoadPlugins; + payload: Plugin[]; +} + +export interface SetPluginsSearchQueryAction { + type: ActionTypes.SetPluginsSearchQuery; + payload: string; +} + +export interface SetLayoutModeAction { + type: ActionTypes.SetLayoutMode; + payload: LayoutMode; +} + +export const setPluginsLayoutMode = (mode: LayoutMode): SetLayoutModeAction => ({ + type: ActionTypes.SetLayoutMode, + payload: mode, +}); + +export const setPluginsSearchQuery = (query: string): SetPluginsSearchQueryAction => ({ + type: ActionTypes.SetPluginsSearchQuery, + payload: query, +}); + +const pluginsLoaded = (plugins: Plugin[]): LoadPluginsAction => ({ + type: ActionTypes.LoadPlugins, + payload: plugins, +}); + +export type Action = LoadPluginsAction | SetPluginsSearchQueryAction | SetLayoutModeAction; + +type ThunkResult = ThunkAction; + +export function loadPlugins(): ThunkResult { + return async dispatch => { + const result = await getBackendSrv().get('api/plugins', { embedded: 0 }); + dispatch(pluginsLoaded(result)); + }; +} diff --git a/public/app/features/plugins/state/reducers.ts b/public/app/features/plugins/state/reducers.ts new file mode 100644 index 00000000000..1ca2880282c --- /dev/null +++ b/public/app/features/plugins/state/reducers.ts @@ -0,0 +1,27 @@ +import { Action, ActionTypes } from './actions'; +import { Plugin, PluginsState } from 'app/types'; +import { LayoutModes } from '../../../core/components/LayoutSelector/LayoutSelector'; + +export const initialState: PluginsState = { + plugins: [] as Plugin[], + searchQuery: '', + layoutMode: LayoutModes.Grid, +}; + +export const pluginsReducer = (state = initialState, action: Action): PluginsState => { + switch (action.type) { + case ActionTypes.LoadPlugins: + 
return { ...state, plugins: action.payload }; + + case ActionTypes.SetPluginsSearchQuery: + return { ...state, searchQuery: action.payload }; + + case ActionTypes.SetLayoutMode: + return { ...state, layoutMode: action.payload }; + } + return state; +}; + +export default { + plugins: pluginsReducer, +}; diff --git a/public/app/features/plugins/state/selectors.test.ts b/public/app/features/plugins/state/selectors.test.ts new file mode 100644 index 00000000000..09b1ce4c259 --- /dev/null +++ b/public/app/features/plugins/state/selectors.test.ts @@ -0,0 +1,31 @@ +import { getPlugins, getPluginsSearchQuery } from './selectors'; +import { initialState } from './reducers'; +import { getMockPlugins } from '../__mocks__/pluginMocks'; + +describe('Selectors', () => { + const mockState = initialState; + + it('should return search query', () => { + mockState.searchQuery = 'test'; + const query = getPluginsSearchQuery(mockState); + + expect(query).toEqual(mockState.searchQuery); + }); + + it('should return plugins', () => { + mockState.plugins = getMockPlugins(5); + mockState.searchQuery = ''; + + const plugins = getPlugins(mockState); + + expect(plugins).toEqual(mockState.plugins); + }); + + it('should filter plugins', () => { + mockState.searchQuery = 'plugin-1'; + + const plugins = getPlugins(mockState); + + expect(plugins.length).toEqual(1); + }); +}); diff --git a/public/app/features/plugins/state/selectors.ts b/public/app/features/plugins/state/selectors.ts new file mode 100644 index 00000000000..e1d16462527 --- /dev/null +++ b/public/app/features/plugins/state/selectors.ts @@ -0,0 +1,10 @@ +export const getPlugins = state => { + const regex = new RegExp(state.searchQuery, 'i'); + + return state.plugins.filter(item => { + return regex.test(item.name) || regex.test(item.info.author.name) || regex.test(item.info.description); + }); +}; + +export const getPluginsSearchQuery = state => state.searchQuery; +export const getLayoutMode = state => state.layoutMode; diff --git 
a/public/app/features/teams/TeamMembers.tsx b/public/app/features/teams/TeamMembers.tsx index cda175f4395..588745eea37 100644 --- a/public/app/features/teams/TeamMembers.tsx +++ b/public/app/features/teams/TeamMembers.tsx @@ -1,10 +1,10 @@ import React, { PureComponent } from 'react'; import { connect } from 'react-redux'; import SlideDown from 'app/core/components/Animations/SlideDown'; -import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { UserPicker } from 'app/core/components/Picker/UserPicker'; import DeleteButton from 'app/core/components/DeleteButton/DeleteButton'; import { TagBadge } from 'app/core/components/TagFilter/TagBadge'; -import { TeamMember } from '../../types'; +import { TeamMember, User } from 'app/types'; import { loadTeamMembers, addTeamMember, removeTeamMember, setSearchMemberQuery } from './state/actions'; import { getSearchMemberQuery, getTeamMembers } from './state/selectors'; diff --git a/public/app/features/teams/__mocks__/teamMocks.ts b/public/app/features/teams/__mocks__/teamMocks.ts index 34fa06b2d09..339f227c081 100644 --- a/public/app/features/teams/__mocks__/teamMocks.ts +++ b/public/app/features/teams/__mocks__/teamMocks.ts @@ -1,4 +1,4 @@ -import { Team, TeamGroup, TeamMember } from '../../../types'; +import { Team, TeamGroup, TeamMember } from 'app/types'; export const getMultipleMockTeams = (numberOfTeams: number): Team[] => { const teams: Team[] = []; diff --git a/public/app/features/templating/TextBoxVariable.ts b/public/app/features/templating/TextBoxVariable.ts new file mode 100644 index 00000000000..331ff4f95b8 --- /dev/null +++ b/public/app/features/templating/TextBoxVariable.ts @@ -0,0 +1,58 @@ +import { Variable, assignModelProperties, variableTypes } from './variable'; + +export class TextBoxVariable implements Variable { + query: string; + current: any; + options: any[]; + skipUrlSync: boolean; + + defaults = { + type: 'textbox', + name: '', + hide: 2, + label: '', + query: '', + current: 
{}, + options: [], + skipUrlSync: false, + }; + + /** @ngInject */ + constructor(private model, private variableSrv) { + assignModelProperties(this, model, this.defaults); + } + + getSaveModel() { + assignModelProperties(this.model, this, this.defaults); + return this.model; + } + + setValue(option) { + this.variableSrv.setOptionAsCurrent(this, option); + } + + updateOptions() { + this.options = [{ text: this.query.trim(), value: this.query.trim() }]; + this.current = this.options[0]; + return Promise.resolve(); + } + + dependsOn(variable) { + return false; + } + + setValueFromUrl(urlValue) { + this.query = urlValue; + return this.variableSrv.setOptionFromUrl(this, urlValue); + } + + getValueForUrl() { + return this.current.value; + } +} + +variableTypes['textbox'] = { + name: 'Text box', + ctor: TextBoxVariable, + description: 'Define a textbox variable, where users can enter any arbitrary string', +}; diff --git a/public/app/features/templating/all.ts b/public/app/features/templating/all.ts index 16465740642..b872fa6cd4a 100644 --- a/public/app/features/templating/all.ts +++ b/public/app/features/templating/all.ts @@ -9,6 +9,7 @@ import { DatasourceVariable } from './datasource_variable'; import { CustomVariable } from './custom_variable'; import { ConstantVariable } from './constant_variable'; import { AdhocVariable } from './adhoc_variable'; +import { TextBoxVariable } from './TextBoxVariable'; coreModule.factory('templateSrv', () => { return templateSrv; @@ -22,4 +23,5 @@ export { CustomVariable, ConstantVariable, AdhocVariable, + TextBoxVariable, }; diff --git a/public/app/features/templating/partials/editor.html b/public/app/features/templating/partials/editor.html index 0d8b0ace327..ac4450c20a2 100644 --- a/public/app/features/templating/partials/editor.html +++ b/public/app/features/templating/partials/editor.html @@ -155,6 +155,14 @@
    +
    +
    Text options
    +
    + Default value + +
    +
    +
    Query Options
    diff --git a/public/app/features/users/InviteesTable.test.tsx b/public/app/features/users/InviteesTable.test.tsx new file mode 100644 index 00000000000..e40ad033c57 --- /dev/null +++ b/public/app/features/users/InviteesTable.test.tsx @@ -0,0 +1,32 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import InviteesTable, { Props } from './InviteesTable'; +import { Invitee } from 'app/types'; +import { getMockInvitees } from './__mocks__/userMocks'; + +const setup = (propOverrides?: object) => { + const props: Props = { + invitees: [] as Invitee[], + onRevokeInvite: jest.fn(), + }; + + Object.assign(props, propOverrides); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); + + it('should render invitees', () => { + const wrapper = setup({ + invitees: getMockInvitees(5), + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/users/InviteesTable.tsx b/public/app/features/users/InviteesTable.tsx new file mode 100644 index 00000000000..2521fbd09e2 --- /dev/null +++ b/public/app/features/users/InviteesTable.tsx @@ -0,0 +1,64 @@ +import React, { createRef, PureComponent } from 'react'; +import { Invitee } from 'app/types'; + +export interface Props { + invitees: Invitee[]; + onRevokeInvite: (code: string) => void; +} + +export default class InviteesTable extends PureComponent { + private copyUrlRef = createRef(); + + copyToClipboard = () => { + const node = this.copyUrlRef.current; + + if (node) { + node.select(); + document.execCommand('copy'); + } + }; + + render() { + const { invitees, onRevokeInvite } = this.props; + + return ( + + + + + + + + + {invitees.map((invitee, index) => { + return ( + + + +
    EmailName + +
    {invitee.email}{invitee.name} +