diff --git a/pkg/api/dataproxy.go b/pkg/api/dataproxy.go
index 965653c2501..da2fd3b4c0a 100644
--- a/pkg/api/dataproxy.go
+++ b/pkg/api/dataproxy.go
@@ -1,94 +1,13 @@
 package api

 import (
-  "bytes"
-  "io/ioutil"
-  "net"
-  "net/http"
-  "net/http/httputil"
-  "net/url"
-  "strings"
-  "time"
-
-  "github.com/grafana/grafana/pkg/api/cloudwatch"
+  "github.com/grafana/grafana/pkg/api/pluginproxy"
   "github.com/grafana/grafana/pkg/bus"
-  "github.com/grafana/grafana/pkg/log"
   "github.com/grafana/grafana/pkg/metrics"
   "github.com/grafana/grafana/pkg/middleware"
   m "github.com/grafana/grafana/pkg/models"
-  "github.com/grafana/grafana/pkg/setting"
-  "github.com/grafana/grafana/pkg/util"
 )

-var (
-  dataproxyLogger log.Logger = log.New("data-proxy-log")
-)
-
-func NewReverseProxy(ds *m.DataSource, proxyPath string, targetUrl *url.URL) *httputil.ReverseProxy {
-  director := func(req *http.Request) {
-    req.URL.Scheme = targetUrl.Scheme
-    req.URL.Host = targetUrl.Host
-    req.Host = targetUrl.Host
-
-    reqQueryVals := req.URL.Query()
-
-    if ds.Type == m.DS_INFLUXDB_08 {
-      req.URL.Path = util.JoinUrlFragments(targetUrl.Path, "db/"+ds.Database+"/"+proxyPath)
-      reqQueryVals.Add("u", ds.User)
-      reqQueryVals.Add("p", ds.Password)
-      req.URL.RawQuery = reqQueryVals.Encode()
-    } else if ds.Type == m.DS_INFLUXDB {
-      req.URL.Path = util.JoinUrlFragments(targetUrl.Path, proxyPath)
-      req.URL.RawQuery = reqQueryVals.Encode()
-      if !ds.BasicAuth {
-        req.Header.Del("Authorization")
-        req.Header.Add("Authorization", util.GetBasicAuthHeader(ds.User, ds.Password))
-      }
-    } else {
-      req.URL.Path = util.JoinUrlFragments(targetUrl.Path, proxyPath)
-    }
-
-    if ds.BasicAuth {
-      req.Header.Del("Authorization")
-      req.Header.Add("Authorization", util.GetBasicAuthHeader(ds.BasicAuthUser, ds.BasicAuthPassword))
-    }
-
-    dsAuth := req.Header.Get("X-DS-Authorization")
-    if len(dsAuth) > 0 {
-      req.Header.Del("X-DS-Authorization")
-      req.Header.Del("Authorization")
-      req.Header.Add("Authorization", dsAuth)
-    }
-
-    // clear cookie headers
-    req.Header.Del("Cookie")
-    req.Header.Del("Set-Cookie")
-
-    // clear X-Forwarded Host/Port/Proto headers
-    req.Header.Del("X-Forwarded-Host")
-    req.Header.Del("X-Forwarded-Port")
-    req.Header.Del("X-Forwarded-Proto")
-
-    // set X-Forwarded-For header
-    if req.RemoteAddr != "" {
-      remoteAddr, _, err := net.SplitHostPort(req.RemoteAddr)
-      if err != nil {
-        remoteAddr = req.RemoteAddr
-      }
-      if req.Header.Get("X-Forwarded-For") != "" {
-        req.Header.Set("X-Forwarded-For", req.Header.Get("X-Forwarded-For")+", "+remoteAddr)
-      } else {
-        req.Header.Set("X-Forwarded-For", remoteAddr)
-      }
-    }
-
-    // reqBytes, _ := httputil.DumpRequestOut(req, true);
-    // log.Trace("Proxying datasource request: %s", string(reqBytes))
-  }
-
-  return &httputil.ReverseProxy{Director: director, FlushInterval: time.Millisecond * 200}
-}
-
 func getDatasource(id int64, orgId int64) (*m.DataSource, error) {
   query := m.GetDataSourceByIdQuery{Id: id, OrgId: orgId}
   if err := bus.Dispatch(&query); err != nil {
@@ -108,90 +27,7 @@ func ProxyDataSourceRequest(c *middleware.Context) {
     return
   }

-  if ds.Type == m.DS_INFLUXDB {
-    if c.Query("db") != ds.Database {
-      c.JsonApiErr(403, "Datasource is not configured to allow this database", nil)
-      return
-    }
-  }
-
-  if ds.Type == m.DS_CLOUDWATCH {
-    cloudwatch.HandleRequest(c, ds)
-    return
-  }
-
-  targetUrl, _ := url.Parse(ds.Url)
-  if !checkWhiteList(c, targetUrl.Host) {
-    return
-  }
-  proxyPath := c.Params("*")
-
-  if ds.Type == m.DS_PROMETHEUS {
-    if c.Req.Request.Method != http.MethodGet || !strings.HasPrefix(proxyPath, "api/") {
-      c.JsonApiErr(403, "GET is only allowed on proxied Prometheus datasource", nil)
-      return
-    }
-  }
-
-  if ds.Type == m.DS_ES {
-    if c.Req.Request.Method == "DELETE" {
-      c.JsonApiErr(403, "Deletes not allowed on proxied Elasticsearch datasource", nil)
-      return
-    }
-    if c.Req.Request.Method == "PUT" {
-      c.JsonApiErr(403, "Puts not allowed on proxied Elasticsearch datasource", nil)
-      return
-    }
-    if c.Req.Request.Method == "POST" && proxyPath != "_msearch" {
-      c.JsonApiErr(403, "Posts not allowed on proxied Elasticsearch datasource except on /_msearch", nil)
-      return
-    }
-  }
-
-  proxy := NewReverseProxy(ds, proxyPath, targetUrl)
-  proxy.Transport, err = ds.GetHttpTransport()
-  if err != nil {
-    c.JsonApiErr(400, "Unable to load TLS certificate", err)
-    return
-  }
-
-  logProxyRequest(ds.Type, c)
-  proxy.ServeHTTP(c.Resp, c.Req.Request)
-  c.Resp.Header().Del("Set-Cookie")
-}
-
-func logProxyRequest(dataSourceType string, c *middleware.Context) {
-  if !setting.DataProxyLogging {
-    return
-  }
-
-  var body string
-  if c.Req.Request.Body != nil {
-    buffer, err := ioutil.ReadAll(c.Req.Request.Body)
-    if err == nil {
-      c.Req.Request.Body = ioutil.NopCloser(bytes.NewBuffer(buffer))
-      body = string(buffer)
-    }
-  }
-
-  dataproxyLogger.Info("Proxying incoming request",
-    "userid", c.UserId,
-    "orgid", c.OrgId,
-    "username", c.Login,
-    "datasource", dataSourceType,
-    "uri", c.Req.RequestURI,
-    "method", c.Req.Request.Method,
-    "body", body)
-}
-
-func checkWhiteList(c *middleware.Context, host string) bool {
-  if host != "" && len(setting.DataProxyWhiteList) > 0 {
-    if _, exists := setting.DataProxyWhiteList[host]; !exists {
-      c.JsonApiErr(403, "Data proxy hostname and ip are not included in whitelist", nil)
-      return false
-    }
-  }
-
-  return true
+  proxy := pluginproxy.NewDataSourceProxy(ds, c, proxyPath)
+  proxy.HandleRequest()
 }
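With the proxy logic moved into the new pluginproxy package, ProxyDataSourceRequest in dataproxy.go shrinks to a thin shim: resolve the datasource, then delegate. A minimal sketch of the resulting handler follows; the unchanged context of the hunk (datasource lookup, metrics, and where proxyPath now comes from) is not shown in this patch, so those details are assumptions here.

func ProxyDataSourceRequest(c *middleware.Context) {
  // Sketch only: the datasource lookup and metrics instrumentation live in
  // unchanged context that this hunk does not show, so they are paraphrased.
  ds, err := getDatasource(c.ParamsInt64(":id"), c.OrgId) // ":id" param name assumed
  if err != nil {
    c.JsonApiErr(500, "Unable to load datasource meta data", err) // message assumed
    return
  }

  proxyPath := c.Params("*") // the wildcard extraction previously lived in the removed block

  // Everything that used to follow (whitelist, per-datasource checks,
  // reverse proxy setup, logging) is now handled inside pluginproxy.
  proxy := pluginproxy.NewDataSourceProxy(ds, c, proxyPath)
  proxy.HandleRequest()
}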
diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go
new file mode 100644
index 00000000000..2e200d099b5
--- /dev/null
+++ b/pkg/api/pluginproxy/ds_proxy.go
@@ -0,0 +1,272 @@
+package pluginproxy
+
+import (
+  "bytes"
+  "errors"
+  "fmt"
+  "html/template"
+  "io/ioutil"
+  "net"
+  "net/http"
+  "net/http/httputil"
+  "net/url"
+  "strings"
+  "time"
+
+  "github.com/grafana/grafana/pkg/api/cloudwatch"
+  "github.com/grafana/grafana/pkg/log"
+  "github.com/grafana/grafana/pkg/middleware"
+  m "github.com/grafana/grafana/pkg/models"
+  "github.com/grafana/grafana/pkg/plugins"
+  "github.com/grafana/grafana/pkg/setting"
+  "github.com/grafana/grafana/pkg/util"
+)
+
+var (
+  logger log.Logger = log.New("data-proxy-log")
+)
+
+type DataSourceProxy struct {
+  ds        *m.DataSource
+  ctx       *middleware.Context
+  targetUrl *url.URL
+  proxyPath string
+  route     *plugins.AppPluginRoute
+}
+
+func NewDataSourceProxy(ds *m.DataSource, ctx *middleware.Context, proxyPath string) *DataSourceProxy {
+  return &DataSourceProxy{
+    ds:        ds,
+    ctx:       ctx,
+    proxyPath: proxyPath,
+  }
+}
+
+func (proxy *DataSourceProxy) HandleRequest() {
+  if proxy.ds.Type == m.DS_CLOUDWATCH {
+    cloudwatch.HandleRequest(proxy.ctx, proxy.ds)
+    return
+  }
+
+  if err := proxy.validateRequest(); err != nil {
+    proxy.ctx.JsonApiErr(403, err.Error(), nil)
+    return
+  }
+
+  reverseProxy := &httputil.ReverseProxy{
+    Director:      proxy.getDirector(),
+    FlushInterval: time.Millisecond * 200,
+  }
+
+  var err error
+  reverseProxy.Transport, err = proxy.ds.GetHttpTransport()
+  if err != nil {
+    proxy.ctx.JsonApiErr(400, "Unable to load TLS certificate", err)
+    return
+  }
+
+  proxy.logRequest()
+
+  reverseProxy.ServeHTTP(proxy.ctx.Resp, proxy.ctx.Req.Request)
+  proxy.ctx.Resp.Header().Del("Set-Cookie")
+}
+
+func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
+  return func(req *http.Request) {
+    req.URL.Scheme = proxy.targetUrl.Scheme
+    req.URL.Host = proxy.targetUrl.Host
+    req.Host = proxy.targetUrl.Host
+
+    reqQueryVals := req.URL.Query()
+
+    if proxy.ds.Type == m.DS_INFLUXDB_08 {
+      req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, "db/"+proxy.ds.Database+"/"+proxy.proxyPath)
+      reqQueryVals.Add("u", proxy.ds.User)
+      reqQueryVals.Add("p", proxy.ds.Password)
+      req.URL.RawQuery = reqQueryVals.Encode()
+    } else if proxy.ds.Type == m.DS_INFLUXDB {
+      req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath)
+      req.URL.RawQuery = reqQueryVals.Encode()
+      if !proxy.ds.BasicAuth {
+        req.Header.Del("Authorization")
+        req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.User, proxy.ds.Password))
+      }
+    } else {
+      req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath)
+    }
+
+    if proxy.ds.BasicAuth {
+      req.Header.Del("Authorization")
+      req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword))
+    }
+
+    dsAuth := req.Header.Get("X-DS-Authorization")
+    if len(dsAuth) > 0 {
+      req.Header.Del("X-DS-Authorization")
+      req.Header.Del("Authorization")
+      req.Header.Add("Authorization", dsAuth)
+    }
+
+    // clear cookie headers
+    req.Header.Del("Cookie")
+    req.Header.Del("Set-Cookie")
+
+    // clear X-Forwarded Host/Port/Proto headers
+    req.Header.Del("X-Forwarded-Host")
+    req.Header.Del("X-Forwarded-Port")
+    req.Header.Del("X-Forwarded-Proto")
+
+    // set X-Forwarded-For header
+    if req.RemoteAddr != "" {
+      remoteAddr, _, err := net.SplitHostPort(req.RemoteAddr)
+      if err != nil {
+        remoteAddr = req.RemoteAddr
+      }
+      if req.Header.Get("X-Forwarded-For") != "" {
+        req.Header.Set("X-Forwarded-For", req.Header.Get("X-Forwarded-For")+", "+remoteAddr)
+      } else {
+        req.Header.Set("X-Forwarded-For", remoteAddr)
+      }
+    }
+
+    if proxy.route != nil {
+      proxy.applyRoute(req)
+    }
+  }
+}
+
+func (proxy *DataSourceProxy) validateRequest() error {
+  if proxy.ds.Type == m.DS_INFLUXDB {
+    if proxy.ctx.Query("db") != proxy.ds.Database {
+      return errors.New("Datasource is not configured to allow this database")
+    }
+  }
+
+  targetUrl, _ := url.Parse(proxy.ds.Url)
+  if !checkWhiteList(proxy.ctx, targetUrl.Host) {
+    return errors.New("Target url is not a valid target")
+  }
+
+  if proxy.ds.Type == m.DS_PROMETHEUS {
+    if proxy.ctx.Req.Request.Method != http.MethodGet || !strings.HasPrefix(proxy.proxyPath, "api/") {
+      return errors.New("GET is only allowed on proxied Prometheus datasource")
+    }
+  }
+
+  if proxy.ds.Type == m.DS_ES {
+    if proxy.ctx.Req.Request.Method == "DELETE" {
+      return errors.New("Deletes not allowed on proxied Elasticsearch datasource")
+    }
+    if proxy.ctx.Req.Request.Method == "PUT" {
+      return errors.New("Puts not allowed on proxied Elasticsearch datasource")
+    }
+    if proxy.ctx.Req.Request.Method == "POST" && proxy.proxyPath != "_msearch" {
+      return errors.New("Posts not allowed on proxied Elasticsearch datasource except on /_msearch")
+    }
+  }
+
+  // found route if there are any
+  if plugin, ok := plugins.DataSources[proxy.ds.Type]; ok {
+    if len(plugin.Routes) > 0 {
+      for _, route := range plugin.Routes {
+        // method match
+        if route.Method != "*" && route.Method != proxy.ctx.Req.Method {
+          continue
+        }
+
+        if strings.HasPrefix(proxy.proxyPath, route.Path) {
+          logger.Info("Apply Route Rule", "rule", route.Path)
"rule", route.Path) + proxy.proxyPath = strings.TrimPrefix(proxy.proxyPath, route.Path) + proxy.route = route + break + } + } + } + } + + proxy.targetUrl = targetUrl + return nil +} + +func (proxy *DataSourceProxy) logRequest() { + if !setting.DataProxyLogging { + return + } + + var body string + if proxy.ctx.Req.Request.Body != nil { + buffer, err := ioutil.ReadAll(proxy.ctx.Req.Request.Body) + if err == nil { + proxy.ctx.Req.Request.Body = ioutil.NopCloser(bytes.NewBuffer(buffer)) + body = string(buffer) + } + } + + logger.Info("Proxying incoming request", + "userid", proxy.ctx.UserId, + "orgid", proxy.ctx.OrgId, + "username", proxy.ctx.Login, + "datasource", proxy.ds.Type, + "uri", proxy.ctx.Req.RequestURI, + "method", proxy.ctx.Req.Request.Method, + "body", body) +} + +func checkWhiteList(c *middleware.Context, host string) bool { + if host != "" && len(setting.DataProxyWhiteList) > 0 { + if _, exists := setting.DataProxyWhiteList[host]; !exists { + c.JsonApiErr(403, "Data proxy hostname and ip are not included in whitelist", nil) + return false + } + } + + return true +} + +func (proxy *DataSourceProxy) applyRoute(req *http.Request) { + logger.Info("ApplyDataSourceRouteRules", "route", proxy.route.Path, "proxyPath", proxy.proxyPath) + + data := templateData{ + JsonData: proxy.ds.JsonData.Interface().(map[string]interface{}), + SecureJsonData: proxy.ds.SecureJsonData.Decrypt(), + } + + logger.Info("Apply Route Rule", "rule", proxy.route.Path) + + routeUrl, err := url.Parse(proxy.route.Url) + if err != nil { + logger.Error("Error parsing plugin route url") + return + } + + req.URL.Scheme = routeUrl.Scheme + req.URL.Host = routeUrl.Host + req.Host = routeUrl.Host + req.URL.Path = util.JoinUrlFragments(routeUrl.Path, proxy.proxyPath) + + if err := addHeaders(&req.Header, proxy.route, data); err != nil { + logger.Error("Failed to render plugin headers", "error", err) + } +} + +func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { + for _, header := range route.Headers { + var contentBuf bytes.Buffer + t, err := template.New("content").Parse(header.Content) + if err != nil { + return errors.New(fmt.Sprintf("could not parse header content template for header %s.", header.Name)) + } + + err = t.Execute(&contentBuf, data) + if err != nil { + return errors.New(fmt.Sprintf("failed to execute header content template for header %s.", header.Name)) + } + + value := contentBuf.String() + + logger.Info("Adding headers", "name", header.Name, "value", value) + reqHeaders.Add(header.Name, value) + } + + return nil +} diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go new file mode 100644 index 00000000000..e2c20966c72 --- /dev/null +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -0,0 +1,64 @@ +package pluginproxy + +import ( + "net/http" + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestDSRouteRule(t *testing.T) { + + Convey("When applying ds route rule", t, func() { + plugin := &plugins.DataSourcePlugin{ + Routes: []*plugins.AppPluginRoute{ + { + Path: "api/v4/", + Url: "https://www.google.com", + Headers: []plugins.AppPluginRouteHeader{ + {Name: "x-header", Content: "my secret {{.SecureJsonData.key}}"}, + }, + }, + }, + } + + setting.SecretKey = "password" + key, _ := util.Encrypt([]byte("123"), "password") + + ds := &m.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "clientId": "asd", + }), + SecureJsonData: map[string][]byte{ + "key": key, + }, + } + + req, _ := http.NewRequest("GET", "http://localhost/asd", nil) + + Convey("When not matching route path", func() { + ApplyDataSourceRouteRules(req, plugin, ds, "/asdas/asd") + + Convey("should not touch req", func() { + So(len(req.Header), ShouldEqual, 0) + So(req.URL.String(), ShouldEqual, "http://localhost/asd") + }) + }) + + Convey("When matching route path", func() { + ApplyDataSourceRouteRules(req, plugin, ds, "api/v4/some/method") + + Convey("should add headers and update url", func() { + So(req.URL.String(), ShouldEqual, "https://www.google.com/some/method") + So(req.Header.Get("x-header"), ShouldEqual, "my secret 123") + }) + }) + + }) + +} diff --git a/pkg/api/pluginproxy/pluginproxy.go b/pkg/api/pluginproxy/pluginproxy.go index a5139bb69f7..59138884228 100644 --- a/pkg/api/pluginproxy/pluginproxy.go +++ b/pkg/api/pluginproxy/pluginproxy.go @@ -1,15 +1,11 @@ package pluginproxy import ( - "bytes" "encoding/json" - "errors" - "fmt" "net" "net/http" "net/http/httputil" "net/url" - "text/template" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" @@ -38,23 +34,8 @@ func getHeaders(route *plugins.AppPluginRoute, orgId int64, appId string) (http. SecureJsonData: query.Result.SecureJsonData.Decrypt(), } - for _, header := range route.Headers { - var contentBuf bytes.Buffer - t, err := template.New("content").Parse(header.Content) - if err != nil { - return nil, errors.New(fmt.Sprintf("could not parse header content template for header %s.", header.Name)) - } - - err = t.Execute(&contentBuf, data) - if err != nil { - return nil, errors.New(fmt.Sprintf("failed to execute header content template for header %s.", header.Name)) - } - - log.Trace("Adding header to proxy request. 
%s: %s", header.Name, contentBuf.String()) - result.Add(header.Name, contentBuf.String()) - } - - return result, nil + err := addHeaders(&result, route, data) + return result, err } func NewApiPluginProxy(ctx *middleware.Context, proxyPath string, route *plugins.AppPluginRoute, appId string) *httputil.ReverseProxy { diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index aa092c2bc20..8cb0748936b 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -4,12 +4,12 @@ import "encoding/json" type DataSourcePlugin struct { FrontendPluginBase - Annotations bool `json:"annotations"` - Metrics bool `json:"metrics"` - Alerting bool `json:"alerting"` - BuiltIn bool `json:"builtIn"` - Mixed bool `json:"mixed"` - App string `json:"app"` + Annotations bool `json:"annotations"` + Metrics bool `json:"metrics"` + Alerting bool `json:"alerting"` + BuiltIn bool `json:"builtIn"` + Mixed bool `json:"mixed"` + Routes []*AppPluginRoute `json:"routes"` } func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error { diff --git a/tests/datasource-test/module.js b/tests/datasource-test/module.js new file mode 100644 index 00000000000..83db563abb9 --- /dev/null +++ b/tests/datasource-test/module.js @@ -0,0 +1,38 @@ +System.register([], function (_export) { + "use strict"; + + return { + setters: [], + execute: function () { + + function Datasource(instanceSettings, backendSrv) { + this.url = instanceSettings.url; + + this.testDatasource = function() { + return backendSrv.datasourceRequest({ + method: 'GET', + url: this.url + '/api/v4/search' + }); + } + } + + function ConfigCtrl() { + + } + + ConfigCtrl.template = ` +