diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 51ffb9dd8ce..442478068b5 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -259,7 +259,6 @@ /pkg/services/searchV2/ @grafana/multitenancy-squad /pkg/services/store/ @grafana/multitenancy-squad /pkg/services/querylibrary/ @grafana/multitenancy-squad -/pkg/services/export/ @grafana/multitenancy-squad /pkg/infra/filestorage/ @grafana/multitenancy-squad /pkg/util/converter/ @grafana/multitenancy-squad diff --git a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md index 9d62292c479..9353b6202aa 100644 --- a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md +++ b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md @@ -101,7 +101,6 @@ The following toggles require explicitly setting Grafana's [app mode]({{< relref | `publicDashboardsEmailSharing` | Allows public dashboard sharing to be restricted to only allowed emails | | `k8s` | Explore native k8s integrations | | `dashboardsFromStorage` | Load dashboards from the generic storage interface | -| `export` | Export grafana instance (to git, etc) | | `grpcServer` | Run GRPC server | | `entityStore` | SQL-based entity store (requires storage flag also) | | `queryLibrary` | Reusable query library | diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index 53774b6f6b0..3df66b76d6b 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -37,7 +37,6 @@ export interface FeatureToggles { storage?: boolean; k8s?: boolean; dashboardsFromStorage?: boolean; - export?: boolean; exploreMixedDatasource?: boolean; tracing?: boolean; newTraceView?: boolean; diff --git a/pkg/api/api.go b/pkg/api/api.go index 8a9b6248415..170853569e9 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -630,13 +630,6 @@ func (hs *HTTPServer) registerRoutes() { adminRoute.Get("/stats", authorize(reqGrafanaAdmin, ac.EvalPermission(ac.ActionServerStatsRead)), routing.Wrap(hs.AdminGetStats)) adminRoute.Post("/pause-all-alerts", reqGrafanaAdmin, routing.Wrap(hs.PauseAllAlerts(setting.AlertingEnabled))) - if hs.Features.IsEnabled(featuremgmt.FlagExport) { - adminRoute.Get("/export", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleGetStatus)) - adminRoute.Post("/export", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleRequestExport)) - adminRoute.Post("/export/stop", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleRequestStop)) - adminRoute.Get("/export/options", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleGetOptions)) - } - adminRoute.Post("/encryption/rotate-data-keys", reqGrafanaAdmin, routing.Wrap(hs.AdminRotateDataEncryptionKeys)) adminRoute.Post("/encryption/reencrypt-data-keys", reqGrafanaAdmin, routing.Wrap(hs.AdminReEncryptEncryptionKeys)) adminRoute.Post("/encryption/reencrypt-secrets", reqGrafanaAdmin, routing.Wrap(hs.AdminReEncryptSecrets)) diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 1a1c193ef5b..710d8c3dee6 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -52,7 +52,6 @@ import ( "github.com/grafana/grafana/pkg/services/datasources" "github.com/grafana/grafana/pkg/services/datasources/permissions" "github.com/grafana/grafana/pkg/services/encryption" - "github.com/grafana/grafana/pkg/services/export" "github.com/grafana/grafana/pkg/services/featuremgmt" 
"github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/services/hooks" @@ -146,7 +145,6 @@ type HTTPServer struct { Live *live.GrafanaLive LivePushGateway *pushhttp.Gateway ThumbService thumbs.Service - ExportService export.ExportService StorageService store.StorageService httpEntityStore httpentitystore.HTTPEntityStore SearchV2HTTPService searchV2.SearchHTTPService @@ -234,7 +232,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi live *live.GrafanaLive, livePushGateway *pushhttp.Gateway, plugCtxProvider *plugincontext.Provider, contextHandler *contexthandler.ContextHandler, features *featuremgmt.FeatureManager, alertNG *ngalert.AlertNG, libraryPanelService librarypanels.Service, libraryElementService libraryelements.Service, - quotaService quota.Service, socialService social.Service, tracer tracing.Tracer, exportService export.ExportService, + quotaService quota.Service, socialService social.Service, tracer tracing.Tracer, encryptionService encryption.Internal, grafanaUpdateChecker *updatechecker.GrafanaService, pluginsUpdateChecker *updatechecker.PluginsService, searchUsersService searchusers.Service, dataSourcesService datasources.DataSourceService, queryDataService *query.Service, @@ -298,7 +296,6 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi DataProxy: dataSourceProxy, SearchV2HTTPService: searchv2HTTPService, SearchService: searchService, - ExportService: exportService, Live: live, LivePushGateway: livePushGateway, PluginContextProvider: plugCtxProvider, diff --git a/pkg/cmd/grafana-cli/runner/wire.go b/pkg/cmd/grafana-cli/runner/wire.go index 4a872b394ad..af7bb2e7cda 100644 --- a/pkg/cmd/grafana-cli/runner/wire.go +++ b/pkg/cmd/grafana-cli/runner/wire.go @@ -5,6 +5,7 @@ package runner import ( "context" + "github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/services/folder/folderimpl" @@ -57,7 +58,6 @@ import ( datasourceservice "github.com/grafana/grafana/pkg/services/datasources/service" "github.com/grafana/grafana/pkg/services/encryption" encryptionservice "github.com/grafana/grafana/pkg/services/encryption/service" - "github.com/grafana/grafana/pkg/services/export" "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/guardian" "github.com/grafana/grafana/pkg/services/hooks" @@ -209,7 +209,6 @@ var wireSet = wire.NewSet( search.ProvideService, searchV2.ProvideService, store.ProvideService, - export.ProvideService, live.ProvideService, pushhttp.ProvideService, contexthandler.ProvideService, diff --git a/pkg/server/wire.go b/pkg/server/wire.go index 3a6e96eb07f..65491f0ad62 100644 --- a/pkg/server/wire.go +++ b/pkg/server/wire.go @@ -6,6 +6,7 @@ package server import ( "github.com/google/wire" sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" + "github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/api" @@ -59,7 +60,6 @@ import ( datasourceservice "github.com/grafana/grafana/pkg/services/datasources/service" "github.com/grafana/grafana/pkg/services/encryption" encryptionservice "github.com/grafana/grafana/pkg/services/encryption/service" - "github.com/grafana/grafana/pkg/services/export" "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/folder/folderimpl" "github.com/grafana/grafana/pkg/services/grpcserver" @@ -231,7 +231,6 @@ var wireBasicSet = wire.NewSet( searchV2.ProvideSearchHTTPService, store.ProvideService, 
store.ProvideSystemUsersService, - export.ProvideService, live.ProvideService, pushhttp.ProvideService, contexthandler.ProvideService, diff --git a/pkg/services/export/commit_helper.go b/pkg/services/export/commit_helper.go deleted file mode 100644 index 0cff51fcf76..00000000000 --- a/pkg/services/export/commit_helper.go +++ /dev/null @@ -1,196 +0,0 @@ -package export - -import ( - "context" - "fmt" - "os" - "path" - "strings" - "time" - - "github.com/go-git/go-git/v5" - "github.com/go-git/go-git/v5/plumbing/object" - "github.com/grafana/grafana-plugin-sdk-go/data" - jsoniter "github.com/json-iterator/go" - - "github.com/grafana/grafana/pkg/infra/appcontext" - "github.com/grafana/grafana/pkg/infra/db" - "github.com/grafana/grafana/pkg/services/user" -) - -type commitHelper struct { - ctx context.Context - repo *git.Repository - work *git.Worktree - orgDir string // includes the orgID - workDir string // same as the worktree root - orgID int64 - users map[int64]*userInfo - stopRequested bool - broadcast func(path string) - exporter string // key for the current exporter - - counter int -} - -type commitBody struct { - fpath string // absolute - body []byte - frame *data.Frame -} - -type commitOptions struct { - body []commitBody - when time.Time - userID int64 - comment string -} - -func (ch *commitHelper) initOrg(ctx context.Context, sql db.DB, orgID int64) error { - return sql.WithDbSession(ch.ctx, func(sess *db.Session) error { - userprefix := "user" - if isPostgreSQL(sql) { - userprefix = `"user"` // postgres has special needs - } - sess.Table("user"). - Join("inner", "org_user", userprefix+`.id = org_user.user_id`). - Cols(userprefix+`.*`, "org_user.role"). - Where("org_user.org_id = ?", orgID). - Asc(userprefix + `.id`) - - rows := make([]*userInfo, 0) - err := sess.Find(&rows) - if err != nil { - return err - } - - lookup := make(map[int64]*userInfo, len(rows)) - for _, row := range rows { - lookup[row.ID] = row - } - ch.users = lookup - ch.orgID = orgID - - // Set an admin user with the - rowUser := &user.SignedInUser{ - Login: "", - OrgID: orgID, // gets filled in from each row - UserID: 0, - } - ch.ctx = appcontext.WithUser(context.Background(), rowUser) - return err - }) -} - -func (ch *commitHelper) add(opts commitOptions) error { - if ch.stopRequested { - return fmt.Errorf("stop requested") - } - - if len(opts.body) < 1 { - return nil // nothing to commit - } - - user, ok := ch.users[opts.userID] - if !ok { - user = &userInfo{ - Name: "admin", - Email: "admin@unknown.org", - } - } - sig := user.getAuthor() - if opts.when.Unix() > 100 { - sig.When = opts.when - } - - for _, b := range opts.body { - if !strings.HasPrefix(b.fpath, ch.orgDir) { - return fmt.Errorf("invalid path, must be within the root folder") - } - - // make sure the parent exists - err := os.MkdirAll(path.Dir(b.fpath), 0750) - if err != nil { - return err - } - - body := b.body - if b.frame != nil { - body, err = jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(b.frame, "", " ") - if err != nil { - return err - } - } - - err = os.WriteFile(b.fpath, body, 0644) - if err != nil { - return err - } - err = os.Chtimes(b.fpath, sig.When, sig.When) - if err != nil { - return err - } - - sub := b.fpath[len(ch.workDir)+1:] - _, err = ch.work.Add(sub) - if err != nil { - status, e2 := ch.work.Status() - if e2 != nil { - return fmt.Errorf("error adding: %s (invalud work status: %s)", sub, e2.Error()) - } - fmt.Printf("STATUS: %+v\n", status) - return fmt.Errorf("unable to add file: %s (%d)", sub, len(b.body)) - } 
- ch.counter++ - } - - copts := &git.CommitOptions{ - Author: &sig, - } - - ch.broadcast(opts.body[0].fpath) - _, err := ch.work.Commit(opts.comment, copts) - return err -} - -type userInfo struct { - ID int64 `json:"-" db:"id"` - Login string `json:"login"` - Email string `json:"email"` - Name string `json:"name"` - Password string `json:"password"` - Salt string `json:"salt"` - Company string `json:"company,omitempty"` - Rands string `json:"-"` - Role string `json:"org_role"` // org role - Theme string `json:"-"` // managed in preferences - Created time.Time `json:"-"` // managed in git or external source - Updated time.Time `json:"-"` // managed in git or external source - IsDisabled bool `json:"disabled" db:"is_disabled"` - IsServiceAccount bool `json:"serviceAccount" db:"is_service_account"` - LastSeenAt time.Time `json:"-" db:"last_seen_at"` - - // Added to make sqlx happy - Version int `json:"-"` - HelpFlags1 int `json:"-" db:"help_flags1"` - OrgID int64 `json:"-" db:"org_id"` - EmailVerified bool `json:"-" db:"email_verified"` - IsAdmin bool `json:"-" db:"is_admin"` -} - -func (u *userInfo) getAuthor() object.Signature { - return object.Signature{ - Name: firstRealStringX(u.Name, u.Login, u.Email, "?"), - Email: firstRealStringX(u.Email, u.Login, u.Name, "?"), - When: time.Now(), - } -} - -func firstRealStringX(vals ...string) string { - for _, v := range vals { - if v != "" { - return v - } - } - return "?" -} diff --git a/pkg/services/export/dummy_job.go b/pkg/services/export/dummy_job.go deleted file mode 100644 index 5671c742bcb..00000000000 --- a/pkg/services/export/dummy_job.go +++ /dev/null @@ -1,105 +0,0 @@ -package export - -import ( - "fmt" - "math" - "math/rand" - "sync" - "time" - - "github.com/grafana/grafana/pkg/infra/log" -) - -var _ Job = new(dummyExportJob) - -type dummyExportJob struct { - logger log.Logger - - statusMu sync.Mutex - status ExportStatus - cfg ExportConfig - broadcaster statusBroadcaster - stopRequested bool - total int -} - -func startDummyExportJob(cfg ExportConfig, broadcaster statusBroadcaster) (Job, error) { - job := &dummyExportJob{ - logger: log.New("dummy_export_job"), - cfg: cfg, - broadcaster: broadcaster, - status: ExportStatus{ - Running: true, - Target: "dummy export", - Started: time.Now().UnixMilli(), - Count: make(map[string]int, 10), - Index: 0, - }, - total: int(math.Round(10 + rand.Float64()*20)), - } - - broadcaster(job.status) - go job.start() - return job, nil -} - -func (e *dummyExportJob) requestStop() { - e.stopRequested = true -} - -func (e *dummyExportJob) start() { - defer func() { - e.logger.Info("Finished dummy export job") - - e.statusMu.Lock() - defer e.statusMu.Unlock() - s := e.status - if err := recover(); err != nil { - e.logger.Error("export panic", "error", err) - s.Status = fmt.Sprintf("ERROR: %v", err) - } - // Make sure it finishes OK - if s.Finished < 10 { - s.Finished = time.Now().UnixMilli() - } - s.Running = false - if s.Status == "" { - s.Status = "done" - } - e.status = s - e.broadcaster(s) - }() - - e.logger.Info("Starting dummy export job") - - ticker := time.NewTicker(1 * time.Second) - for t := range ticker.C { - e.statusMu.Lock() - e.status.Changed = t.UnixMilli() - e.status.Index++ - e.status.Last = fmt.Sprintf("ITEM: %d", e.status.Index) - e.statusMu.Unlock() - - // Wait till we are done - shouldStop := e.stopRequested || e.status.Index >= e.total - e.broadcaster(e.status) - - if shouldStop { - break - } - } -} - -func (e *dummyExportJob) getStatus() ExportStatus { - e.statusMu.Lock() - defer 
e.statusMu.Unlock() - - return e.status -} - -func (e *dummyExportJob) getConfig() ExportConfig { - e.statusMu.Lock() - defer e.statusMu.Unlock() - - return e.cfg -} diff --git a/pkg/services/export/entity_store.go b/pkg/services/export/entity_store.go deleted file mode 100644 index 9e956f4adff..00000000000 --- a/pkg/services/export/entity_store.go +++ /dev/null @@ -1,374 +0,0 @@ -package export - -import ( - "context" - "encoding/json" - "fmt" - "sync" - "time" - - "github.com/grafana/grafana/pkg/infra/appcontext" - "github.com/grafana/grafana/pkg/infra/db" - "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/services/dashboardsnapshots" - "github.com/grafana/grafana/pkg/services/playlist" - "github.com/grafana/grafana/pkg/services/sqlstore/session" - "github.com/grafana/grafana/pkg/services/store/entity" - "github.com/grafana/grafana/pkg/services/store/kind/folder" - "github.com/grafana/grafana/pkg/services/store/kind/snapshot" - "github.com/grafana/grafana/pkg/services/user" -) - -var _ Job = new(entityStoreJob) - -type entityStoreJob struct { - logger log.Logger - - statusMu sync.Mutex - status ExportStatus - cfg ExportConfig - broadcaster statusBroadcaster - stopRequested bool - ctx context.Context - - sess *session.SessionDB - playlistService playlist.Service - store entity.EntityStoreServer - dashboardsnapshots dashboardsnapshots.Service -} - -func startEntityStoreJob(ctx context.Context, - cfg ExportConfig, - broadcaster statusBroadcaster, - db db.DB, - playlistService playlist.Service, - store entity.EntityStoreServer, - dashboardsnapshots dashboardsnapshots.Service, -) (Job, error) { - job := &entityStoreJob{ - logger: log.New("export_to_object_store_job"), - cfg: cfg, - ctx: ctx, - broadcaster: broadcaster, - status: ExportStatus{ - Running: true, - Target: "object store export", - Started: time.Now().UnixMilli(), - Count: make(map[string]int, 10), - Index: 0, - }, - sess: db.GetSqlxSession(), - playlistService: playlistService, - store: store, - dashboardsnapshots: dashboardsnapshots, - } - - broadcaster(job.status) - go job.start(ctx) - return job, nil -} - -func (e *entityStoreJob) requestStop() { - e.stopRequested = true -} - -func (e *entityStoreJob) start(ctx context.Context) { - defer func() { - e.logger.Info("Finished dummy export job") - - e.statusMu.Lock() - defer e.statusMu.Unlock() - s := e.status - if err := recover(); err != nil { - e.logger.Error("export panic", "error", err) - s.Status = fmt.Sprintf("ERROR: %v", err) - } - // Make sure it finishes OK - if s.Finished < 10 { - s.Finished = time.Now().UnixMilli() - } - s.Running = false - if s.Status == "" { - s.Status = "done" - } - e.status = s - e.broadcaster(s) - }() - - e.logger.Info("Starting dummy export job") - // Select all dashboards - rowUser := &user.SignedInUser{ - Login: "", - OrgID: 0, // gets filled in from each row - UserID: 0, - } - ctx = appcontext.WithUser(ctx, rowUser) - - what := entity.StandardKindFolder - e.status.Count[what] = 0 - - folders := make(map[int64]string) - folderInfo, err := e.getFolders(ctx) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - e.status.Last = fmt.Sprintf("export %d folders", len(folderInfo)) - e.broadcaster(e.status) - - for _, dash := range folderInfo { - folders[dash.ID] = dash.UID - } - - for _, dash := range folderInfo { - rowUser.OrgID = dash.OrgID - rowUser.UserID = dash.UpdatedBy - if dash.UpdatedBy < 0 { - rowUser.UserID = 0 // avoid Uint64Val issue???? 
- } - f := folder.Model{Name: dash.Title} - d, _ := json.Marshal(f) - - _, err = e.store.AdminWrite(ctx, &entity.AdminWriteEntityRequest{ - GRN: &entity.GRN{ - UID: dash.UID, - Kind: entity.StandardKindFolder, - }, - ClearHistory: true, - CreatedAt: dash.Created.UnixMilli(), - UpdatedAt: dash.Updated.UnixMilli(), - UpdatedBy: fmt.Sprintf("user:%d", dash.UpdatedBy), - CreatedBy: fmt.Sprintf("user:%d", dash.CreatedBy), - Body: d, - Folder: folders[dash.FolderID], - Comment: "(exported from SQL)", - Origin: &entity.EntityOriginInfo{ - Source: "export-from-sql", - }, - }) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - e.status.Changed = time.Now().UnixMilli() - e.status.Index++ - e.status.Count[what] += 1 - e.status.Last = fmt.Sprintf("ITEM: %s", dash.UID) - e.broadcaster(e.status) - } - - what = entity.StandardKindDashboard - e.status.Count[what] = 0 - - // TODO paging etc - // NOTE: doing work inside rows.Next() leads to database locked - dashInfo, err := e.getDashboards(ctx) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - e.status.Last = fmt.Sprintf("export %d dashboards", len(dashInfo)) - e.broadcaster(e.status) - - for _, dash := range dashInfo { - rowUser.OrgID = dash.OrgID - rowUser.UserID = dash.UpdatedBy - if dash.UpdatedBy < 0 { - rowUser.UserID = 0 // avoid Uint64Val issue???? - } - - _, err = e.store.AdminWrite(ctx, &entity.AdminWriteEntityRequest{ - GRN: &entity.GRN{ - UID: dash.UID, - Kind: entity.StandardKindDashboard, - }, - ClearHistory: true, - Version: fmt.Sprintf("%d", dash.Version), - CreatedAt: dash.Created.UnixMilli(), - UpdatedAt: dash.Updated.UnixMilli(), - UpdatedBy: fmt.Sprintf("user:%d", dash.UpdatedBy), - CreatedBy: fmt.Sprintf("user:%d", dash.CreatedBy), - Body: dash.Data, - Folder: folders[dash.FolderID], - Comment: "(exported from SQL)", - Origin: &entity.EntityOriginInfo{ - Source: "export-from-sql", - }, - }) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - e.status.Changed = time.Now().UnixMilli() - e.status.Index++ - e.status.Count[what] += 1 - e.status.Last = fmt.Sprintf("ITEM: %s", dash.UID) - e.broadcaster(e.status) - } - - // Playlists - what = entity.StandardKindPlaylist - e.status.Count[what] = 0 - rowUser.OrgID = 1 - rowUser.UserID = 1 - res, err := e.playlistService.Search(ctx, &playlist.GetPlaylistsQuery{ - OrgId: rowUser.OrgID, // TODO... all or orgs - Limit: 5000, - }) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - for _, item := range res { - playlist, err := e.playlistService.Get(ctx, &playlist.GetPlaylistByUidQuery{ - UID: item.UID, - OrgId: rowUser.OrgID, - }) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - - _, err = e.store.Write(ctx, &entity.WriteEntityRequest{ - GRN: &entity.GRN{ - UID: playlist.Uid, - Kind: entity.StandardKindPlaylist, - }, - Body: prettyJSON(playlist), - Comment: "export from playlists", - }) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - e.status.Changed = time.Now().UnixMilli() - e.status.Index++ - e.status.Count[what] += 1 - e.status.Last = fmt.Sprintf("ITEM: %s", playlist.Uid) - e.broadcaster(e.status) - } - - // TODO.. 
query lookup - orgIDs := []int64{1} - what = "snapshot" - for _, orgId := range orgIDs { - rowUser.OrgID = orgId - rowUser.UserID = 1 - cmd := &dashboardsnapshots.GetDashboardSnapshotsQuery{ - OrgID: orgId, - Limit: 500000, - SignedInUser: rowUser, - } - - result, err := e.dashboardsnapshots.SearchDashboardSnapshots(ctx, cmd) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - - for _, dto := range result { - m := snapshot.Model{ - Name: dto.Name, - ExternalURL: dto.ExternalURL, - Expires: dto.Expires.UnixMilli(), - } - rowUser.OrgID = dto.OrgID - rowUser.UserID = dto.UserID - - snapcmd := &dashboardsnapshots.GetDashboardSnapshotQuery{ - Key: dto.Key, - } - snapcmdResult, err := e.dashboardsnapshots.GetDashboardSnapshot(ctx, snapcmd) - if err == nil { - res := snapcmdResult - m.DeleteKey = res.DeleteKey - m.ExternalURL = res.ExternalURL - - snap := res.Dashboard - m.DashboardUID = snap.Get("uid").MustString("") - snap.Del("uid") - snap.Del("id") - - b, _ := snap.MarshalJSON() - m.Snapshot = b - } - - _, err = e.store.Write(ctx, &entity.WriteEntityRequest{ - GRN: &entity.GRN{ - UID: dto.Key, - Kind: entity.StandardKindSnapshot, - }, - Body: prettyJSON(m), - Comment: "export from snapshtts", - }) - if err != nil { - e.status.Status = "error: " + err.Error() - return - } - e.status.Changed = time.Now().UnixMilli() - e.status.Index++ - e.status.Count[what] += 1 - e.status.Last = fmt.Sprintf("ITEM: %s", dto.Name) - e.broadcaster(e.status) - } - } -} - -type dashInfo struct { - OrgID int64 `db:"org_id"` - UID string - Version int64 - Slug string - Data []byte - Created time.Time - Updated time.Time - CreatedBy int64 `db:"created_by"` - UpdatedBy int64 `db:"updated_by"` - FolderID int64 `db:"folder_id"` -} - -type folderInfo struct { - ID int64 `db:"id"` - OrgID int64 `db:"org_id"` - UID string - Title string - Created time.Time - Updated time.Time - CreatedBy int64 `db:"created_by"` - UpdatedBy int64 `db:"updated_by"` - FolderID int64 `db:"folder_id"` -} - -// TODO, paging etc -func (e *entityStoreJob) getDashboards(ctx context.Context) ([]dashInfo, error) { - e.status.Last = "find dashbaords...." - e.broadcaster(e.status) - - dash := make([]dashInfo, 0) - err := e.sess.Select(ctx, &dash, "SELECT org_id,uid,version,slug,data,folder_id,created,updated,created_by,updated_by FROM dashboard WHERE is_folder=false") - return dash, err -} - -// TODO, paging etc -func (e *entityStoreJob) getFolders(ctx context.Context) ([]folderInfo, error) { - e.status.Last = "find dashbaords...." 
- e.broadcaster(e.status) - - dash := make([]folderInfo, 0) - err := e.sess.Select(ctx, &dash, "SELECT id,org_id,uid,title,folder_id,created,updated,created_by,updated_by FROM dashboard WHERE is_folder=true") - return dash, err -} - -func (e *entityStoreJob) getStatus() ExportStatus { - e.statusMu.Lock() - defer e.statusMu.Unlock() - - return e.status -} - -func (e *entityStoreJob) getConfig() ExportConfig { - e.statusMu.Lock() - defer e.statusMu.Unlock() - - return e.cfg -} diff --git a/pkg/services/export/export_alerts.go b/pkg/services/export/export_alerts.go deleted file mode 100644 index c6d608c08f3..00000000000 --- a/pkg/services/export/export_alerts.go +++ /dev/null @@ -1,51 +0,0 @@ -package export - -import ( - "encoding/json" - "fmt" - "path" - "time" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportAlerts(helper *commitHelper, job *gitExportJob) error { - alertDir := path.Join(helper.orgDir, "alerts") - - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type ruleResult struct { - Title string `xorm:"title"` - UID string `xorm:"uid"` - NamespaceUID string `xorm:"namespace_uid"` - RuleGroup string `xorm:"rule_group"` - Condition json.RawMessage `xorm:"data"` - DashboardUID string `xorm:"dashboard_uid"` - PanelID int64 `xorm:"panel_id"` - Updated time.Time `xorm:"updated" json:"-"` - } - - rows := make([]*ruleResult, 0) - - sess.Table("alert_rule").Where("org_id = ?", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - return err - } - - for _, row := range rows { - err = helper.add(commitOptions{ - body: []commitBody{{ - body: prettyJSON(row), - fpath: path.Join(alertDir, row.UID) + ".json", // must be JSON files - }}, - comment: fmt.Sprintf("Alert: %s", row.Title), - when: row.Updated, - }) - if err != nil { - return err - } - } - return err - }) -} diff --git a/pkg/services/export/export_anno.go b/pkg/services/export/export_anno.go deleted file mode 100644 index c8c43dc4286..00000000000 --- a/pkg/services/export/export_anno.go +++ /dev/null @@ -1,132 +0,0 @@ -package export - -import ( - "encoding/json" - "fmt" - "path/filepath" - "time" - - "github.com/grafana/grafana-plugin-sdk-go/data" - jsoniter "github.com/json-iterator/go" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportAnnotations(helper *commitHelper, job *gitExportJob) error { - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type annoResult struct { - ID int64 `xorm:"id"` - DashboardID int64 `xorm:"dashboard_id"` - PanelID int64 `xorm:"panel_id"` - UserID int64 `xorm:"user_id"` - Text string `xorm:"text"` - Epoch int64 `xorm:"epoch"` - EpochEnd int64 `xorm:"epoch_end"` - Created int64 `xorm:"created"` // not used - Tags string `xorm:"tags"` // JSON Array - } - - type annoEvent struct { - PanelID int64 `json:"panel"` - Text string `json:"text"` - Epoch int64 `json:"epoch"` // dashboard/start+end is really the UID - EpochEnd int64 `json:"epoch_end,omitempty"` - Tags []string - } - - rows := make([]*annoResult, 0) - - sess.Table("annotation"). - Where("org_id = ? 
AND alert_id = 0", helper.orgID).Asc("epoch") - - err := sess.Find(&rows) - if err != nil { - return err - } - - count := len(rows) - f_ID := data.NewFieldFromFieldType(data.FieldTypeInt64, count) - f_DashboardID := data.NewFieldFromFieldType(data.FieldTypeInt64, count) - f_PanelID := data.NewFieldFromFieldType(data.FieldTypeInt64, count) - f_Epoch := data.NewFieldFromFieldType(data.FieldTypeTime, count) - f_EpochEnd := data.NewFieldFromFieldType(data.FieldTypeNullableTime, count) - f_Text := data.NewFieldFromFieldType(data.FieldTypeString, count) - f_Tags := data.NewFieldFromFieldType(data.FieldTypeJSON, count) - - f_ID.Name = "ID" - f_DashboardID.Name = "DashboardID" - f_PanelID.Name = "PanelID" - f_Epoch.Name = "Epoch" - f_EpochEnd.Name = "EpochEnd" - f_Text.Name = "Text" - f_Tags.Name = "Tags" - - for id, row := range rows { - f_ID.Set(id, row.ID) - f_DashboardID.Set(id, row.DashboardID) - f_PanelID.Set(id, row.PanelID) - f_Epoch.Set(id, time.UnixMilli(row.Epoch)) - if row.Epoch != row.EpochEnd { - f_EpochEnd.SetConcrete(id, time.UnixMilli(row.EpochEnd)) - } - f_Text.Set(id, row.Text) - f_Tags.Set(id, json.RawMessage(row.Tags)) - - // Save a file for each - event := &annoEvent{ - PanelID: row.PanelID, - Text: row.Text, - } - err = json.Unmarshal([]byte(row.Tags), &event.Tags) - if err != nil { - return err - } - fname := fmt.Sprintf("%d", row.Epoch) - if row.Epoch != row.EpochEnd { - fname += "-" + fmt.Sprintf("%d", row.EpochEnd) - } - - err = helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(helper.orgDir, - "annotations", - "dashboard", - fmt.Sprintf("id-%d", row.DashboardID), - fname+".json"), - body: prettyJSON(event), - }, - }, - when: time.UnixMilli(row.Epoch), - comment: fmt.Sprintf("Added annotation (%d)", row.ID), - userID: row.UserID, - }) - if err != nil { - return err - } - } - - if f_ID.Len() > 0 { - frame := data.NewFrame("", f_ID, f_DashboardID, f_PanelID, f_Epoch, f_EpochEnd, f_Text, f_Tags) - js, err := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(frame, "", " ") - if err != nil { - return err - } - - err = helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(helper.orgDir, "annotations", "annotations.json"), - body: js, // TODO, pretty? - }, - }, - when: time.Now(), - comment: "Exported annotations", - }) - if err != nil { - return err - } - } - return err - }) -} diff --git a/pkg/services/export/export_auth.go b/pkg/services/export/export_auth.go deleted file mode 100644 index d3315a6599c..00000000000 --- a/pkg/services/export/export_auth.go +++ /dev/null @@ -1,138 +0,0 @@ -package export - -import ( - "path" - "strconv" - - "github.com/grafana/grafana-plugin-sdk-go/data" - "github.com/grafana/grafana-plugin-sdk-go/data/sqlutil" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func dumpAuthTables(helper *commitHelper, job *gitExportJob) error { - isMySQL := isMySQLEngine(job.sql) - - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - commit := commitOptions{ - comment: "auth tables dump", - } - - type statsTables struct { - table string - sql string - converters []sqlutil.Converter - drop []string - } - - dump := []statsTables{ - { - table: "user", - sql: removeQuotesFromQuery(` - SELECT "user".*, org_user.role - FROM "user" - JOIN org_user ON "user".id = org_user.user_id - WHERE org_user.org_id =`+strconv.FormatInt(helper.orgID, 10), isMySQL), - converters: []sqlutil.Converter{{Dynamic: true}}, - drop: []string{ - "id", "version", - "password", // UMMMMM... 
for now - "org_id", - }, - }, - { - table: "user_role", - sql: ` - SELECT * FROM user_role - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - }, - { - table: "builtin_role", - sql: ` - SELECT * FROM builtin_role - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - }, - { - table: "api_key", - sql: ` - SELECT * FROM api_key - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - }, - { - table: "permission", - sql: ` - SELECT permission.* - FROM permission - JOIN role ON permission.role_id = role.id - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - }, - { - table: "user_auth_token", - sql: ` - SELECT user_auth_token.* - FROM user_auth_token - JOIN org_user ON user_auth_token.id = org_user.user_id - WHERE org_user.org_id =` + strconv.FormatInt(helper.orgID, 10), - }, - {table: "team"}, - {table: "team_role"}, - {table: "team_member"}, - {table: "temp_user"}, - {table: "role"}, - } - - for _, auth := range dump { - if auth.sql == "" { - auth.sql = ` - SELECT * FROM ` + auth.table + ` - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10) - } - if auth.converters == nil { - auth.converters = []sqlutil.Converter{{Dynamic: true}} - } - if auth.drop == nil { - auth.drop = []string{ - "id", - "org_id", - } - } - - rows, err := sess.DB().QueryContext(helper.ctx, auth.sql) - if err != nil { - if isTableNotExistsError(err) { - continue - } - return err - } - - frame, err := sqlutil.FrameFromRows(rows.Rows, -1, auth.converters...) - if err != nil { - return err - } - if frame.Fields[0].Len() < 1 { - continue // do not write empty structures - } - - if len(auth.drop) > 0 { - lookup := make(map[string]bool, len(auth.drop)) - for _, v := range auth.drop { - lookup[v] = true - } - fields := make([]*data.Field, 0, len(frame.Fields)) - for _, f := range frame.Fields { - if lookup[f.Name] { - continue - } - fields = append(fields, f) - } - frame.Fields = fields - } - frame.Name = auth.table - commit.body = append(commit.body, commitBody{ - fpath: path.Join(helper.orgDir, "auth", "sql.dump", auth.table+".json"), - frame: frame, - }) - } - return helper.add(commit) - }) -} diff --git a/pkg/services/export/export_dash.go b/pkg/services/export/export_dash.go deleted file mode 100644 index 976abdb8a9a..00000000000 --- a/pkg/services/export/export_dash.go +++ /dev/null @@ -1,241 +0,0 @@ -package export - -import ( - "encoding/json" - "fmt" - "path" - "path/filepath" - "strings" - "time" - - "github.com/google/uuid" - - "github.com/grafana/grafana/pkg/infra/db" - "github.com/grafana/grafana/pkg/infra/filestorage" - "github.com/grafana/grafana/pkg/services/store/kind/dashboard" -) - -func exportDashboards(helper *commitHelper, job *gitExportJob) error { - alias := make(map[string]string, 100) - ids := make(map[int64]string, 100) - folders := make(map[int64]string, 100) - - // Should root files be at the root or in a subfolder called "general"? 
- if len(job.cfg.GeneralFolderPath) > 0 { - folders[0] = job.cfg.GeneralFolderPath // "general" - } - - lookup, err := dashboard.LoadDatasourceLookup(helper.ctx, helper.orgID, job.sql) - if err != nil { - return err - } - - rootDir := path.Join(helper.orgDir, "drive") - folderStructure := commitOptions{ - when: time.Now(), - comment: "Exported folder structure", - } - - err = job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type dashDataQueryResult struct { - Id int64 - UID string `xorm:"uid"` - IsFolder bool `xorm:"is_folder"` - FolderID int64 `xorm:"folder_id"` - Slug string `xorm:"slug"` - Data []byte - Created time.Time - Updated time.Time - } - - rows := make([]*dashDataQueryResult, 0) - - sess.Table("dashboard"). - Where("org_id = ?", helper.orgID). - Cols("id", "is_folder", "folder_id", "data", "slug", "created", "updated", "uid") - - err := sess.Find(&rows) - if err != nil { - return err - } - - reader := dashboard.NewStaticDashboardSummaryBuilder(lookup, false) - - // Process all folders - for _, row := range rows { - if !row.IsFolder { - continue - } - dash, _, err := reader(helper.ctx, row.UID, row.Data) - if err != nil { - return err - } - - dash.UID = row.UID - slug := cleanFileName(dash.Name) - folder := map[string]string{ - "title": dash.Name, - } - - folderStructure.body = append(folderStructure.body, commitBody{ - fpath: path.Join(rootDir, slug, "__folder.json"), - body: prettyJSON(folder), - }) - - alias[dash.UID] = slug - folders[row.Id] = slug - - if row.Created.Before(folderStructure.when) { - folderStructure.when = row.Created - } - } - - // Now process the dashboards in each folder - for _, row := range rows { - if row.IsFolder { - continue - } - fname := row.Slug + "-dashboard.json" - fpath, ok := folders[row.FolderID] - if ok { - fpath = path.Join(fpath, fname) - } else { - fpath = fname - } - - alias[row.UID] = fpath - ids[row.Id] = fpath - } - - return err - }) - if err != nil { - return err - } - - err = helper.add(folderStructure) - if err != nil { - return err - } - - err = helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(helper.orgDir, "root-alias.json"), - body: prettyJSON(alias), - }, - { - fpath: filepath.Join(helper.orgDir, "root-ids.json"), - body: prettyJSON(ids), - }, - }, - when: folderStructure.when, - comment: "adding UID alias structure", - }) - if err != nil { - return err - } - - // Now walk the history - err = job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type dashVersionResult struct { - DashId int64 `xorm:"id"` - Version int64 `xorm:"version"` - Created time.Time `xorm:"created"` - CreatedBy int64 `xorm:"created_by"` - Message string `xorm:"message"` - Data []byte - } - - rows := make([]*dashVersionResult, 0, len(ids)) - - if job.cfg.KeepHistory { - sess.Table("dashboard_version"). - Join("INNER", "dashboard", "dashboard.id = dashboard_version.dashboard_id"). - Where("org_id = ?", helper.orgID). - Cols("dashboard.id", - "dashboard_version.version", - "dashboard_version.created", - "dashboard_version.created_by", - "dashboard_version.message", - "dashboard_version.data"). - Asc("dashboard_version.created") - } else { - sess.Table("dashboard"). - Where("org_id = ?", helper.orgID). - Cols("id", - "version", - "created", - "created_by", - "data"). - Asc("created") - } - - err := sess.Find(&rows) - if err != nil { - return err - } - - count := int64(0) - - // Process all folders (only one level deep!!!) 
- for _, row := range rows { - fpath, ok := ids[row.DashId] - if !ok { - continue - } - - msg := row.Message - if msg == "" { - msg = fmt.Sprintf("Version: %d", row.Version) - } - - err = helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(rootDir, fpath), - body: cleanDashboardJSON(row.Data), - }, - }, - userID: row.CreatedBy, - when: row.Created, - comment: msg, - }) - if err != nil { - return err - } - count++ - fmt.Printf("COMMIT: %d // %s (%d)\n", count, fpath, row.Version) - } - - return nil - }) - - return err -} - -func cleanDashboardJSON(data []byte) []byte { - var dash map[string]interface{} - err := json.Unmarshal(data, &dash) - if err != nil { - return nil - } - delete(dash, "id") - delete(dash, "uid") - delete(dash, "version") - - clean, _ := json.MarshalIndent(dash, "", " ") - return clean -} - -// replace any unsafe file name characters... TODO, but be a standard way to do this cleanly!!! -func cleanFileName(name string) string { - name = strings.ReplaceAll(name, "/", "-") - name = strings.ReplaceAll(name, "\\", "-") - name = strings.ReplaceAll(name, ":", "-") - if err := filestorage.ValidatePath(filestorage.Delimiter + name); err != nil { - randomName, _ := uuid.NewRandom() - return randomName.String() - } - return name -} diff --git a/pkg/services/export/export_dash_thumbs.go b/pkg/services/export/export_dash_thumbs.go deleted file mode 100644 index fa90dd8bc07..00000000000 --- a/pkg/services/export/export_dash_thumbs.go +++ /dev/null @@ -1,80 +0,0 @@ -package export - -import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" - "time" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportDashboardThumbnails(helper *commitHelper, job *gitExportJob) error { - alias := make(map[string]string, 100) - aliasLookup, err := os.ReadFile(filepath.Join(helper.orgDir, "root-alias.json")) - if err != nil { - return fmt.Errorf("missing dashboard alias files (must export dashboards first)") - } - err = json.Unmarshal(aliasLookup, &alias) - if err != nil { - return err - } - - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type dashboardThumb struct { - UID string `xorm:"uid"` - Image []byte `xorm:"image"` - Theme string `xorm:"theme"` - Kind string `xorm:"kind"` - MimeType string `xorm:"mime_type"` - Updated time.Time - } - - rows := make([]*dashboardThumb, 0) - - // SELECT uid,image,theme,kind,mime_type,dashboard_thumbnail.updated - // FROM dashboard_thumbnail - // JOIN dashboard ON dashboard.id = dashboard_thumbnail.dashboard_id - // WHERE org_id = 2; //dashboard.uid = '2VVbg06nz'; - - sess.Table("dashboard_thumbnail"). - Join("INNER", "dashboard", "dashboard.id = dashboard_thumbnail.dashboard_id"). - Cols("uid", "image", "theme", "kind", "mime_type", "dashboard_thumbnail.updated"). 
- Where("dashboard.org_id = ?", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - if isTableNotExistsError(err) { - return nil - } - return err - } - - // Process all folders - for _, row := range rows { - p, ok := alias[row.UID] - if !ok { - p = "uid/" + row.UID - } else { - p = strings.TrimSuffix(p, "-dash.json") - } - - err := helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(helper.orgDir, "thumbs", fmt.Sprintf("%s.thumb-%s.png", p, row.Theme)), - body: row.Image, - }, - }, - when: row.Updated, - comment: "Thumbnail", - }) - if err != nil { - return err - } - } - return nil - }) -} diff --git a/pkg/services/export/export_ds.go b/pkg/services/export/export_ds.go deleted file mode 100644 index 5e99baed6b6..00000000000 --- a/pkg/services/export/export_ds.go +++ /dev/null @@ -1,47 +0,0 @@ -package export - -import ( - "fmt" - "path/filepath" - "sort" - - "github.com/grafana/grafana/pkg/services/datasources" -) - -func exportDataSources(helper *commitHelper, job *gitExportJob) error { - cmd := &datasources.GetDataSourcesQuery{ - OrgID: helper.orgID, - } - dataSources, err := job.datasourceService.GetDataSources(helper.ctx, cmd) - if err != nil { - return nil - } - - sort.SliceStable(dataSources, func(i, j int) bool { - return dataSources[i].Created.After(dataSources[j].Created) - }) - - for _, ds := range dataSources { - ds.OrgID = 0 - ds.Version = 0 - ds.SecureJsonData = map[string][]byte{ - "TODO": []byte("XXX"), - } - - err := helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(helper.orgDir, "datasources", fmt.Sprintf("%s-ds.json", ds.UID)), - body: prettyJSON(ds), - }, - }, - when: ds.Created, - comment: fmt.Sprintf("Add datasource: %s", ds.Name), - }) - if err != nil { - return err - } - } - - return nil -} diff --git a/pkg/services/export/export_files.go b/pkg/services/export/export_files.go deleted file mode 100644 index 0530224626d..00000000000 --- a/pkg/services/export/export_files.go +++ /dev/null @@ -1,48 +0,0 @@ -package export - -import ( - "fmt" - "path" - - "github.com/grafana/grafana/pkg/infra/filestorage" - "github.com/grafana/grafana/pkg/infra/log" -) - -func exportFiles(helper *commitHelper, job *gitExportJob) error { - fs := filestorage.NewDbStorage(log.New("grafanaStorageLogger"), job.sql, nil, fmt.Sprintf("/%d/", helper.orgID)) - - paging := &filestorage.Paging{} - for { - rsp, err := fs.List(helper.ctx, "/resources", paging, &filestorage.ListOptions{ - WithFolders: false, // ???? 
- Recursive: true, - WithContents: true, - }) - if err != nil { - return err - } - - for _, f := range rsp.Files { - if f.Size < 1 { - continue - } - err = helper.add(commitOptions{ - body: []commitBody{{ - body: f.Contents, - fpath: path.Join(helper.orgDir, f.FullPath), - }}, - comment: fmt.Sprintf("Adding: %s", path.Base(f.FullPath)), - when: f.Created, - }) - if err != nil { - return err - } - } - - paging.After = rsp.LastPath - if !rsp.HasMore { - break - } - } - return nil -} diff --git a/pkg/services/export/export_kv_store.go b/pkg/services/export/export_kv_store.go deleted file mode 100644 index ec22d3fa02a..00000000000 --- a/pkg/services/export/export_kv_store.go +++ /dev/null @@ -1,46 +0,0 @@ -package export - -import ( - "fmt" - "path" - "time" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportKVStore(helper *commitHelper, job *gitExportJob) error { - kvdir := path.Join(helper.orgDir, "system", "kv_store") - - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type kvResult struct { - Namespace string `xorm:"namespace"` - Key string `xorm:"key"` - Value string `xorm:"value"` - Updated time.Time `xorm:"updated"` - } - - rows := make([]*kvResult, 0) - - sess.Table("kv_store").Where("org_id = ? OR org_id = 0", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - return err - } - - for _, row := range rows { - err = helper.add(commitOptions{ - body: []commitBody{{ - body: []byte(row.Value), - fpath: path.Join(kvdir, row.Namespace, row.Key), - }}, - comment: fmt.Sprintf("Exporting: %s/%s", row.Namespace, row.Key), - when: row.Updated, - }) - if err != nil { - return err - } - } - return err - }) -} diff --git a/pkg/services/export/export_plugins.go b/pkg/services/export/export_plugins.go deleted file mode 100644 index 37dde5ec378..00000000000 --- a/pkg/services/export/export_plugins.go +++ /dev/null @@ -1,52 +0,0 @@ -package export - -import ( - "encoding/json" - "fmt" - "path" - "time" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportPlugins(helper *commitHelper, job *gitExportJob) error { - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type pResult struct { - PluginID string `xorm:"plugin_id" json:"-"` - Enabled string `xorm:"enabled" json:"enabled"` - Pinned string `xorm:"pinned" json:"pinned"` - JSONData json.RawMessage `xorm:"json_data" json:"json_data,omitempty"` - // TODO: secure!!!! 
- PluginVersion string `xorm:"plugin_version" json:"version"` - Created time.Time `xorm:"created" json:"created"` - Updated time.Time `xorm:"updated" json:"updated"` - } - - rows := make([]*pResult, 0) - - sess.Table("plugin_setting").Where("org_id = ?", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - if isTableNotExistsError(err) { - return nil - } - return err - } - - for _, row := range rows { - err = helper.add(commitOptions{ - body: []commitBody{{ - body: prettyJSON(row), - fpath: path.Join(helper.orgDir, "plugins", row.PluginID, "settings.json"), - }}, - comment: fmt.Sprintf("Plugin: %s", row.PluginID), - when: row.Updated, - }) - if err != nil { - return err - } - } - return err - }) -} diff --git a/pkg/services/export/export_snapshots.go b/pkg/services/export/export_snapshots.go deleted file mode 100644 index 715e93d087d..00000000000 --- a/pkg/services/export/export_snapshots.go +++ /dev/null @@ -1,43 +0,0 @@ -package export - -import ( - "fmt" - "path/filepath" - "time" - - "github.com/grafana/grafana/pkg/services/dashboardsnapshots" -) - -func exportSnapshots(helper *commitHelper, job *gitExportJob) error { - cmd := &dashboardsnapshots.GetDashboardSnapshotsQuery{ - OrgID: helper.orgID, - Limit: 500000, - SignedInUser: nil, - } - if cmd.SignedInUser == nil { - return fmt.Errorf("snapshots requires an admin user") - } - - result, err := job.dashboardsnapshotsService.SearchDashboardSnapshots(helper.ctx, cmd) - if err != nil { - return err - } - - if len(result) < 1 { - return nil // nothing - } - - gitcmd := commitOptions{ - when: time.Now(), - comment: "Export snapshots", - } - - for _, snapshot := range result { - gitcmd.body = append(gitcmd.body, commitBody{ - fpath: filepath.Join(helper.orgDir, "snapshot", fmt.Sprintf("%d-snapshot.json", snapshot.ID)), - body: prettyJSON(snapshot), - }) - } - - return helper.add(gitcmd) -} diff --git a/pkg/services/export/export_sys_playlists.go b/pkg/services/export/export_sys_playlists.go deleted file mode 100644 index c9007c8e4d9..00000000000 --- a/pkg/services/export/export_sys_playlists.go +++ /dev/null @@ -1,51 +0,0 @@ -package export - -import ( - "fmt" - "path/filepath" - "time" - - "github.com/grafana/grafana/pkg/services/playlist" - "github.com/grafana/grafana/pkg/services/store/entity" -) - -func exportSystemPlaylists(helper *commitHelper, job *gitExportJob) error { - cmd := &playlist.GetPlaylistsQuery{ - OrgId: helper.orgID, - Limit: 500000, - } - res, err := job.playlistService.Search(helper.ctx, cmd) - if err != nil { - return err - } - - if len(res) < 1 { - return nil // nothing - } - - gitcmd := commitOptions{ - when: time.Now(), - comment: "Export playlists", - } - - for _, item := range res { - playlist, err := job.playlistService.Get(helper.ctx, &playlist.GetPlaylistByUidQuery{ - UID: item.UID, - OrgId: helper.orgID, - }) - if err != nil { - return err - } - - gitcmd.body = append(gitcmd.body, commitBody{ - fpath: filepath.Join( - helper.orgDir, - "entity", - entity.StandardKindPlaylist, - fmt.Sprintf("%s.json", playlist.Uid)), - body: prettyJSON(playlist), - }) - } - - return helper.add(gitcmd) -} diff --git a/pkg/services/export/export_sys_preferences.go b/pkg/services/export/export_sys_preferences.go deleted file mode 100644 index 57594998497..00000000000 --- a/pkg/services/export/export_sys_preferences.go +++ /dev/null @@ -1,136 +0,0 @@ -package export - -import ( - "fmt" - "path" - "path/filepath" - "time" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportSystemPreferences(helper *commitHelper, 
job *gitExportJob) error { - type preferences struct { - UserID int64 `json:"-" xorm:"user_id"` - TeamID int64 `json:"-" xorm:"team_id"` - HomeDashboardID int64 `json:"-" xorm:"home_dashboard_id"` - Updated time.Time `json:"-" xorm:"updated"` - JSONData map[string]interface{} `json:"-" xorm:"json_data"` - - Theme string `json:"theme"` - Locale string `json:"locale"` - Timezone string `json:"timezone"` - WeekStart string `json:"week_start,omitempty"` - HomeDashboard string `json:"home,omitempty" xorm:"uid"` // dashboard - NavBar interface{} `json:"navbar,omitempty"` - QueryHistory interface{} `json:"queryHistory,omitempty"` - } - - prefsDir := path.Join(helper.orgDir, "system", "preferences") - users := make(map[int64]*userInfo, len(helper.users)) - for _, user := range helper.users { - users[user.ID] = user - } - - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - rows := make([]*preferences, 0) - - sess.Table("preferences"). - Join("LEFT", "dashboard", "dashboard.id = preferences.home_dashboard_id"). - Cols("preferences.*", "dashboard.uid"). - Where("preferences.org_id = ?", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - return err - } - - var comment string - var fpath string - for _, row := range rows { - if row.TeamID > 0 { - fpath = filepath.Join(prefsDir, "team", fmt.Sprintf("%d.json", row.TeamID)) - comment = fmt.Sprintf("Team preferences: %d", row.TeamID) - } else if row.UserID == 0 { - fpath = filepath.Join(prefsDir, "default.json") - comment = "Default preferences" - } else { - user, ok := users[row.UserID] - if ok { - delete(users, row.UserID) - if user.IsServiceAccount { - continue // don't write preferences for service account - } - } else { - user = &userInfo{ - Login: fmt.Sprintf("__%d__", row.UserID), - } - } - - fpath = filepath.Join(prefsDir, "user", fmt.Sprintf("%s.json", user.Login)) - comment = fmt.Sprintf("User preferences: %s", user.getAuthor().Name) - } - - if row.JSONData != nil { - v, ok := row.JSONData["locale"] - if ok && row.Locale == "" { - s, ok := v.(string) - if ok { - row.Locale = s - } - } - - v, ok = row.JSONData["navbar"] - if ok && row.NavBar == nil { - row.NavBar = v - } - - v, ok = row.JSONData["queryHistory"] - if ok && row.QueryHistory == nil { - row.QueryHistory = v - } - } - - err := helper.add(commitOptions{ - body: []commitBody{ - { - fpath: fpath, - body: prettyJSON(row), - }, - }, - when: row.Updated, - comment: comment, - userID: row.UserID, - }) - if err != nil { - return err - } - } - - // add a file for all useres that may not be in the system - for _, user := range users { - if user.IsServiceAccount { - continue - } - - row := preferences{ - Theme: user.Theme, // never set? 
- } - err := helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(prefsDir, "user", fmt.Sprintf("%s.json", user.Login)), - body: prettyJSON(row), - }, - }, - when: user.Updated, - comment: "user preferences", - userID: row.UserID, - }) - if err != nil { - return err - } - } - return err - }) -} diff --git a/pkg/services/export/export_sys_short_url.go b/pkg/services/export/export_sys_short_url.go deleted file mode 100644 index 4d83d679c44..00000000000 --- a/pkg/services/export/export_sys_short_url.go +++ /dev/null @@ -1,72 +0,0 @@ -package export - -import ( - "fmt" - "path/filepath" - "time" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportSystemShortURL(helper *commitHelper, job *gitExportJob) error { - mostRecent := int64(0) - lastSeen := make(map[string]int64, 50) - dir := filepath.Join(helper.orgDir, "system", "short_url") - - err := job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type urlResult struct { - UID string `xorm:"uid" json:"-"` - Path string `xorm:"path" json:"path"` - CreatedBy int64 `xorm:"created_by" json:"-"` - CreatedAt time.Time `xorm:"created_at" json:"-"` - LastSeenAt int64 `xorm:"last_seen_at" json:"-"` - } - - rows := make([]*urlResult, 0) - - sess.Table("short_url").Where("org_id = ?", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - return err - } - - for _, row := range rows { - if row.LastSeenAt > 0 { - lastSeen[row.UID] = row.LastSeenAt - if mostRecent < row.LastSeenAt { - mostRecent = row.LastSeenAt - } - } - err := helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(dir, "uid", fmt.Sprintf("%s.json", row.UID)), - body: prettyJSON(row), - }, - }, - when: row.CreatedAt, - comment: "short URL", - userID: row.CreatedBy, - }) - if err != nil { - return err - } - } - return err - }) - if err != nil || len(lastSeen) < 1 { - return err - } - - return helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(dir, "last_seen_at.json"), - body: prettyJSON(lastSeen), - }, - }, - when: time.UnixMilli(mostRecent), - comment: "short URL", - }) -} diff --git a/pkg/services/export/export_sys_stars.go b/pkg/services/export/export_sys_stars.go deleted file mode 100644 index f0c8792a166..00000000000 --- a/pkg/services/export/export_sys_stars.go +++ /dev/null @@ -1,65 +0,0 @@ -package export - -import ( - "fmt" - "path/filepath" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportSystemStars(helper *commitHelper, job *gitExportJob) error { - byUser := make(map[int64][]string, 50) - - err := job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - type starResult struct { - User int64 `xorm:"user_id"` - UID string `xorm:"uid"` - } - - rows := make([]*starResult, 0) - - sess.Table("star"). - Join("INNER", "dashboard", "dashboard.id = star.dashboard_id"). - Cols("star.user_id", "dashboard.uid"). 
- Where("dashboard.org_id = ?", helper.orgID) - - err := sess.Find(&rows) - if err != nil { - return err - } - - for _, row := range rows { - stars := append(byUser[row.User], fmt.Sprintf("dashboard/%s", row.UID)) - byUser[row.User] = stars - } - return err - }) - if err != nil { - return err - } - - for userID, stars := range byUser { - user, ok := helper.users[userID] - if !ok { - user = &userInfo{ - Login: fmt.Sprintf("__unknown_%d", userID), - } - } - - err := helper.add(commitOptions{ - body: []commitBody{ - { - fpath: filepath.Join(helper.orgDir, "system", "stars", fmt.Sprintf("%s.json", user.Login)), - body: prettyJSON(stars), - }, - }, - when: user.Updated, - comment: "user preferences", - userID: userID, - }) - if err != nil { - return err - } - } - return nil -} diff --git a/pkg/services/export/export_usage.go b/pkg/services/export/export_usage.go deleted file mode 100644 index 85c16e98552..00000000000 --- a/pkg/services/export/export_usage.go +++ /dev/null @@ -1,85 +0,0 @@ -package export - -import ( - "path" - "strconv" - - "github.com/grafana/grafana-plugin-sdk-go/data/sqlutil" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func exportUsage(helper *commitHelper, job *gitExportJob) error { - return job.sql.WithDbSession(helper.ctx, func(sess *db.Session) error { - commit := commitOptions{ - comment: "usage stats", - } - - type statsTables struct { - table string - sql string - converters []sqlutil.Converter - } - - dump := []statsTables{ - { - table: "data_source_usage_by_day", - sql: `SELECT day,uid,queries,errors,load_duration_ms - FROM data_source_usage_by_day - JOIN data_source ON data_source.id = data_source_usage_by_day.data_source_id - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - converters: []sqlutil.Converter{{Dynamic: true}}, - }, - { - table: "dashboard_usage_by_day", - sql: `SELECT uid,day,views,queries,errors,load_duration - FROM dashboard_usage_by_day - JOIN dashboard ON dashboard_usage_by_day.dashboard_id = dashboard.id - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - converters: []sqlutil.Converter{{Dynamic: true}}, - }, - { - table: "dashboard_usage_sums", - sql: `SELECT uid, - views_last_1_days, - views_last_7_days, - views_last_30_days, - views_total, - queries_last_1_days, - queries_last_7_days, - queries_last_30_days, - queries_total, - errors_last_1_days, - errors_last_7_days, - errors_last_30_days, - errors_total - FROM dashboard_usage_sums - JOIN dashboard ON dashboard_usage_sums.dashboard_id = dashboard.id - WHERE org_id =` + strconv.FormatInt(helper.orgID, 10), - converters: []sqlutil.Converter{{Dynamic: true}}, - }, - } - - for _, usage := range dump { - rows, err := sess.DB().QueryContext(helper.ctx, usage.sql) - if err != nil { - if isTableNotExistsError(err) { - continue - } - return err - } - - frame, err := sqlutil.FrameFromRows(rows.Rows, -1, usage.converters...) 
- if err != nil { - return err - } - frame.Name = usage.table - commit.body = append(commit.body, commitBody{ - fpath: path.Join(helper.orgDir, "usage", usage.table+".json"), - frame: frame, - }) - } - - return helper.add(commit) - }) -} diff --git a/pkg/services/export/git_export_job.go b/pkg/services/export/git_export_job.go deleted file mode 100644 index ab8822ae91c..00000000000 --- a/pkg/services/export/git_export_job.go +++ /dev/null @@ -1,233 +0,0 @@ -package export - -import ( - "context" - "encoding/json" - "fmt" - "path" - "runtime/debug" - "sync" - "time" - - "github.com/go-git/go-git/v5" - "github.com/go-git/go-git/v5/plumbing" - - "github.com/grafana/grafana/pkg/infra/db" - "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/services/dashboardsnapshots" - "github.com/grafana/grafana/pkg/services/datasources" - "github.com/grafana/grafana/pkg/services/org" - "github.com/grafana/grafana/pkg/services/playlist" -) - -var _ Job = new(gitExportJob) - -type gitExportJob struct { - logger log.Logger - sql db.DB - dashboardsnapshotsService dashboardsnapshots.Service - datasourceService datasources.DataSourceService - playlistService playlist.Service - orgService org.Service - rootDir string - - statusMu sync.Mutex - status ExportStatus - cfg ExportConfig - broadcaster statusBroadcaster - helper *commitHelper -} - -func startGitExportJob(ctx context.Context, cfg ExportConfig, sql db.DB, - dashboardsnapshotsService dashboardsnapshots.Service, rootDir string, orgID int64, - broadcaster statusBroadcaster, playlistService playlist.Service, orgService org.Service, - datasourceService datasources.DataSourceService) (Job, error) { - job := &gitExportJob{ - logger: log.New("git_export_job"), - cfg: cfg, - sql: sql, - dashboardsnapshotsService: dashboardsnapshotsService, - playlistService: playlistService, - orgService: orgService, - datasourceService: datasourceService, - rootDir: rootDir, - broadcaster: broadcaster, - status: ExportStatus{ - Running: true, - Target: "git export", - Started: time.Now().UnixMilli(), - Count: make(map[string]int, len(exporters)*2), - }, - } - - broadcaster(job.status) - go job.start(ctx) - return job, nil -} - -func (e *gitExportJob) getStatus() ExportStatus { - e.statusMu.Lock() - defer e.statusMu.Unlock() - - return e.status -} - -func (e *gitExportJob) getConfig() ExportConfig { - e.statusMu.Lock() - defer e.statusMu.Unlock() - - return e.cfg -} - -func (e *gitExportJob) requestStop() { - e.helper.stopRequested = true // will error on the next write -} - -// Utility function to export dashboards -func (e *gitExportJob) start(ctx context.Context) { - defer func() { - e.logger.Info("Finished git export job") - e.statusMu.Lock() - defer e.statusMu.Unlock() - s := e.status - if err := recover(); err != nil { - e.logger.Error("export panic", "error", err) - e.logger.Error("trace", "error", string(debug.Stack())) - s.Status = fmt.Sprintf("ERROR: %v", err) - } - // Make sure it finishes OK - if s.Finished < 10 { - s.Finished = time.Now().UnixMilli() - } - s.Running = false - if s.Status == "" { - s.Status = "done" - } - s.Target = e.rootDir - e.status = s - e.broadcaster(s) - }() - - err := e.doExportWithHistory(ctx) - if err != nil { - e.logger.Error("ERROR", "e", err) - e.status.Status = "ERROR" - e.status.Last = err.Error() - e.broadcaster(e.status) - } -} - -func (e *gitExportJob) doExportWithHistory(ctx context.Context) error { - r, err := git.PlainInit(e.rootDir, false) - if err != nil { - return err - } - // default to "main" branch - h 
:= plumbing.NewSymbolicReference(plumbing.HEAD, plumbing.ReferenceName("refs/heads/main")) - err = r.Storer.SetReference(h) - if err != nil { - return err - } - - w, err := r.Worktree() - if err != nil { - return err - } - e.helper = &commitHelper{ - repo: r, - work: w, - ctx: ctx, - workDir: e.rootDir, - orgDir: e.rootDir, - broadcast: func(p string) { - e.status.Index++ - e.status.Last = p[len(e.rootDir):] - e.status.Changed = time.Now().UnixMilli() - e.broadcaster(e.status) - }, - } - - cmd := &org.SearchOrgsQuery{} - result, err := e.orgService.Search(e.helper.ctx, cmd) - if err != nil { - return err - } - - // Export each org - for _, org := range result { - if len(result) > 1 { - e.helper.orgDir = path.Join(e.rootDir, fmt.Sprintf("org_%d", org.ID)) - e.status.Count["orgs"] += 1 - } - err = e.helper.initOrg(ctx, e.sql, org.ID) - if err != nil { - return err - } - - err := e.process(exporters) - if err != nil { - return err - } - } - - // cleanup the folder - e.status.Target = "pruning..." - e.broadcaster(e.status) - err = r.Prune(git.PruneOptions{}) - - // TODO - // git gc --prune=now --aggressive - - return err -} - -func (e *gitExportJob) process(exporters []Exporter) error { - if false { // NEEDS a real user ID first - err := exportSnapshots(e.helper, e) - if err != nil { - return err - } - } - - for _, exp := range exporters { - if e.cfg.Exclude[exp.Key] { - continue - } - - e.status.Target = exp.Key - e.helper.exporter = exp.Key - - before := e.helper.counter - if exp.process != nil { - err := exp.process(e.helper, e) - - if err != nil { - return err - } - } - - if exp.Exporters != nil { - err := e.process(exp.Exporters) - if err != nil { - return err - } - } - - // Aggregate the counts for each org in the same report - e.status.Count[exp.Key] += (e.helper.counter - before) - } - return nil -} - -func prettyJSON(v interface{}) []byte { - b, _ := json.MarshalIndent(v, "", " ") - return b -} - -/** - -git remote add origin git@github.com:ryantxu/test-dash-repo.git -git branch -M main -git push -u origin main - -**/ diff --git a/pkg/services/export/service.go b/pkg/services/export/service.go deleted file mode 100644 index 011d68eb9cd..00000000000 --- a/pkg/services/export/service.go +++ /dev/null @@ -1,267 +0,0 @@ -package export - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "os" - "path/filepath" - "sync" - "time" - - "github.com/grafana/grafana/pkg/api/response" - "github.com/grafana/grafana/pkg/infra/appcontext" - "github.com/grafana/grafana/pkg/infra/db" - "github.com/grafana/grafana/pkg/infra/log" - contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" - "github.com/grafana/grafana/pkg/services/dashboardsnapshots" - "github.com/grafana/grafana/pkg/services/datasources" - "github.com/grafana/grafana/pkg/services/featuremgmt" - "github.com/grafana/grafana/pkg/services/live" - "github.com/grafana/grafana/pkg/services/org" - "github.com/grafana/grafana/pkg/services/playlist" - "github.com/grafana/grafana/pkg/services/store/entity" - "github.com/grafana/grafana/pkg/setting" -) - -type ExportService interface { - // List folder contents - HandleGetStatus(c *contextmodel.ReqContext) response.Response - - // List Get Options - HandleGetOptions(c *contextmodel.ReqContext) response.Response - - // Read raw file contents out of the store - HandleRequestExport(c *contextmodel.ReqContext) response.Response - - // Cancel any running export - HandleRequestStop(c *contextmodel.ReqContext) response.Response -} - -var exporters = []Exporter{ - { - Key: 
"auth", - Name: "Authentication", - Description: "Saves raw SQL tables", - process: dumpAuthTables, - }, - { - Key: "dash", - Name: "Dashboards", - Description: "Save dashboard JSON", - process: exportDashboards, - Exporters: []Exporter{ - { - Key: "dash_thumbs", - Name: "Dashboard thumbnails", - Description: "Save current dashboard preview images", - process: exportDashboardThumbnails, - }, - }, - }, - { - Key: "alerts", - Name: "Alerts", - Description: "Archive alert rules and configuration", - process: exportAlerts, - }, - { - Key: "ds", - Name: "Data sources", - Description: "Data source configurations", - process: exportDataSources, - }, - { - Key: "system", - Name: "System", - Description: "Save service settings", - Exporters: []Exporter{ - { - Key: "system_preferences", - Name: "Preferences", - Description: "User and team preferences", - process: exportSystemPreferences, - }, - { - Key: "system_stars", - Name: "Stars", - Description: "User stars", - process: exportSystemStars, - }, - { - Key: "system_playlists", - Name: "Playlists", - Description: "Playlists", - process: exportSystemPlaylists, - }, - { - Key: "system_kv_store", - Name: "Key Value store", - Description: "Internal KV store", - process: exportKVStore, - }, - { - Key: "system_short_url", - Name: "Short URLs", - Description: "saved links", - process: exportSystemShortURL, - }, - }, - }, - { - Key: "files", - Name: "Files", - Description: "Export internal file system", - process: exportFiles, - }, - { - Key: "anno", - Name: "Annotations", - Description: "Write an DataFrame for all annotations on a dashboard", - process: exportAnnotations, - }, - { - Key: "plugins", - Name: "Plugins", - Description: "Save settings for all configured plugins", - process: exportPlugins, - }, - { - Key: "usage", - Name: "Usage", - Description: "archive current usage stats", - process: exportUsage, - }, - // { - // Key: "snapshots", - // Name: "Snapshots", - // Description: "write snapshots", - // process: exportSnapshots, - // }, -} - -type StandardExport struct { - logger log.Logger - glive *live.GrafanaLive - mutex sync.Mutex - dataDir string - - // Services - db db.DB - dashboardsnapshotsService dashboardsnapshots.Service - playlistService playlist.Service - orgService org.Service - datasourceService datasources.DataSourceService - store entity.EntityStoreServer - - // updated with mutex - exportJob Job -} - -func ProvideService(db db.DB, features featuremgmt.FeatureToggles, gl *live.GrafanaLive, cfg *setting.Cfg, - dashboardsnapshotsService dashboardsnapshots.Service, playlistService playlist.Service, orgService org.Service, - datasourceService datasources.DataSourceService, store entity.EntityStoreServer) ExportService { - if !features.IsEnabled(featuremgmt.FlagExport) { - return &StubExport{} - } - - return &StandardExport{ - glive: gl, - logger: log.New("export_service"), - dashboardsnapshotsService: dashboardsnapshotsService, - playlistService: playlistService, - orgService: orgService, - datasourceService: datasourceService, - exportJob: &stoppedJob{}, - dataDir: cfg.DataPath, - store: store, - db: db, - } -} - -func (ex *StandardExport) HandleGetOptions(c *contextmodel.ReqContext) response.Response { - info := map[string]interface{}{ - "exporters": exporters, - } - return response.JSON(http.StatusOK, info) -} - -func (ex *StandardExport) HandleGetStatus(c *contextmodel.ReqContext) response.Response { - ex.mutex.Lock() - defer ex.mutex.Unlock() - - return response.JSON(http.StatusOK, ex.exportJob.getStatus()) -} - -func (ex 
*StandardExport) HandleRequestStop(c *contextmodel.ReqContext) response.Response { - ex.mutex.Lock() - defer ex.mutex.Unlock() - - ex.exportJob.requestStop() - - return response.JSON(http.StatusOK, ex.exportJob.getStatus()) -} - -func (ex *StandardExport) HandleRequestExport(c *contextmodel.ReqContext) response.Response { - var cfg ExportConfig - err := json.NewDecoder(c.Req.Body).Decode(&cfg) - if err != nil { - return response.Error(http.StatusBadRequest, "unable to read config", err) - } - - ex.mutex.Lock() - defer ex.mutex.Unlock() - - status := ex.exportJob.getStatus() - if status.Running { - ex.logger.Error("export already running") - return response.Error(http.StatusLocked, "export already running", nil) - } - - ctx := appcontext.WithUser(context.Background(), c.SignedInUser) - var job Job - broadcast := func(s ExportStatus) { - ex.broadcastStatus(c.OrgID, s) - } - switch cfg.Format { - case "dummy": - job, err = startDummyExportJob(cfg, broadcast) - case "entityStore": - job, err = startEntityStoreJob(ctx, cfg, broadcast, ex.db, ex.playlistService, ex.store, ex.dashboardsnapshotsService) - case "git": - dir := filepath.Join(ex.dataDir, "export_git", fmt.Sprintf("git_%d", time.Now().Unix())) - if err := os.MkdirAll(dir, os.ModePerm); err != nil { - return response.Error(http.StatusBadRequest, "Error creating export folder", nil) - } - job, err = startGitExportJob(ctx, cfg, ex.db, ex.dashboardsnapshotsService, dir, c.OrgID, broadcast, ex.playlistService, ex.orgService, ex.datasourceService) - default: - return response.Error(http.StatusBadRequest, "Unsupported job format", nil) - } - - if err != nil { - ex.logger.Error("failed to start export job", "err", err) - return response.Error(http.StatusBadRequest, "failed to start export job", err) - } - - ex.exportJob = job - - info := map[string]interface{}{ - "cfg": cfg, // parsed job we are running - "status": ex.exportJob.getStatus(), - } - return response.JSON(http.StatusOK, info) -} - -func (ex *StandardExport) broadcastStatus(orgID int64, s ExportStatus) { - msg, err := json.Marshal(s) - if err != nil { - ex.logger.Warn("Error making message", "err", err) - return - } - err = ex.glive.Publish(orgID, "grafana/broadcast/export", msg) - if err != nil { - ex.logger.Warn("Error Publish message", "err", err) - return - } -} diff --git a/pkg/services/export/stopped_job.go b/pkg/services/export/stopped_job.go deleted file mode 100644 index 2bd248d6a65..00000000000 --- a/pkg/services/export/stopped_job.go +++ /dev/null @@ -1,21 +0,0 @@ -package export - -import "time" - -var _ Job = new(stoppedJob) - -type stoppedJob struct { -} - -func (e *stoppedJob) getStatus() ExportStatus { - return ExportStatus{ - Running: false, - Changed: time.Now().UnixMilli(), - } -} - -func (e *stoppedJob) getConfig() ExportConfig { - return ExportConfig{} -} - -func (e *stoppedJob) requestStop() {} diff --git a/pkg/services/export/stub.go b/pkg/services/export/stub.go deleted file mode 100644 index fe8d9768d6b..00000000000 --- a/pkg/services/export/stub.go +++ /dev/null @@ -1,28 +0,0 @@ -package export - -import ( - "net/http" - - "github.com/grafana/grafana/pkg/api/response" - contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" -) - -var _ ExportService = new(StubExport) - -type StubExport struct{} - -func (ex *StubExport) HandleGetStatus(c *contextmodel.ReqContext) response.Response { - return response.Error(http.StatusForbidden, "feature not enabled", nil) -} - -func (ex *StubExport) HandleGetOptions(c *contextmodel.ReqContext) 
response.Response { - return response.Error(http.StatusForbidden, "feature not enabled", nil) -} - -func (ex *StubExport) HandleRequestExport(c *contextmodel.ReqContext) response.Response { - return response.Error(http.StatusForbidden, "feature not enabled", nil) -} - -func (ex *StubExport) HandleRequestStop(c *contextmodel.ReqContext) response.Response { - return response.Error(http.StatusForbidden, "feature not enabled", nil) -} diff --git a/pkg/services/export/types.go b/pkg/services/export/types.go deleted file mode 100644 index a64cabecfef..00000000000 --- a/pkg/services/export/types.go +++ /dev/null @@ -1,46 +0,0 @@ -package export - -// Export status. Only one running at a time -type ExportStatus struct { - Running bool `json:"running"` - Target string `json:"target"` // description of where it is going (no secrets) - Started int64 `json:"started,omitempty"` - Finished int64 `json:"finished,omitempty"` - Changed int64 `json:"update,omitempty"` - Last string `json:"last,omitempty"` - Status string `json:"status"` // ERROR, SUCCESS, ETC - Index int `json:"index,omitempty"` - Count map[string]int `json:"count,omitempty"` -} - -// Basic export config (for now) -type ExportConfig struct { - Format string `json:"format"` - GeneralFolderPath string `json:"generalFolderPath"` - KeepHistory bool `json:"history"` - - Exclude map[string]bool `json:"exclude"` - - // Depends on the format - Git GitExportConfig `json:"git"` -} - -type GitExportConfig struct{} - -type Job interface { - getStatus() ExportStatus - getConfig() ExportConfig - requestStop() -} - -// Will broadcast the live status -type statusBroadcaster func(s ExportStatus) - -type Exporter struct { - Key string `json:"key"` - Name string `json:"name"` - Description string `json:"description"` - Exporters []Exporter `json:"exporters,omitempty"` - - process func(helper *commitHelper, job *gitExportJob) error -} diff --git a/pkg/services/export/utils.go b/pkg/services/export/utils.go deleted file mode 100644 index 567264c9c39..00000000000 --- a/pkg/services/export/utils.go +++ /dev/null @@ -1,29 +0,0 @@ -package export - -import ( - "strings" - - "github.com/grafana/grafana/pkg/infra/db" -) - -func isTableNotExistsError(err error) bool { - txt := err.Error() - return strings.HasPrefix(txt, "no such table") || // SQLite - strings.HasSuffix(txt, " does not exist") || // PostgreSQL - strings.HasSuffix(txt, " doesn't exist") // MySQL -} - -func removeQuotesFromQuery(query string, remove bool) string { - if remove { - return strings.ReplaceAll(query, `"`, "") - } - return query -} - -func isMySQLEngine(sql db.DB) bool { - return sql.GetDBType() == "mysql" -} - -func isPostgreSQL(sql db.DB) bool { - return sql.GetDBType() == "postgres" -} diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index 70b56370717..b8f15a158ad 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -120,12 +120,6 @@ var ( State: FeatureStateAlpha, RequiresDevMode: true, // Also a gate on automatic git storage (for now) }, - { - Name: "export", - Description: "Export grafana instance (to git, etc)", - State: FeatureStateAlpha, - RequiresDevMode: true, - }, { Name: "exploreMixedDatasource", Description: "Enable mixed datasource in Explore", diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index 4781421e7eb..63e177e54a9 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -91,10 +91,6 @@ const ( // 
Load dashboards from the generic storage interface FlagDashboardsFromStorage = "dashboardsFromStorage" - // FlagExport - // Export grafana instance (to git, etc) - FlagExport = "export" - // FlagExploreMixedDatasource // Enable mixed datasource in Explore FlagExploreMixedDatasource = "exploreMixedDatasource" diff --git a/pkg/services/navtree/navtreeimpl/admin.go b/pkg/services/navtree/navtreeimpl/admin.go index 6f6e9164a15..531c63233b1 100644 --- a/pkg/services/navtree/navtreeimpl/admin.go +++ b/pkg/services/navtree/navtreeimpl/admin.go @@ -161,16 +161,6 @@ func (s *ServiceImpl) getServerAdminNode(c *contextmodel.ReqContext) *navtree.Na Url: s.cfg.AppSubURL + "/admin/storage", } adminNavLinks = append(adminNavLinks, storage) - - if s.features.IsEnabled(featuremgmt.FlagExport) { - storage.Children = append(storage.Children, &navtree.NavLink{ - Text: "Export", - Id: "export", - SubTitle: "Export grafana settings", - Icon: "cube", - Url: s.cfg.AppSubURL + "/admin/storage/export", - }) - } } if s.cfg.LDAPEnabled && hasAccess(ac.ReqGrafanaAdmin, ac.EvalPermission(ac.ActionLDAPStatusRead)) { diff --git a/public/app/features/storage/ExportPage.tsx b/public/app/features/storage/ExportPage.tsx deleted file mode 100644 index 96e5e39caeb..00000000000 --- a/public/app/features/storage/ExportPage.tsx +++ /dev/null @@ -1,278 +0,0 @@ -import React, { useEffect, useState, useCallback } from 'react'; -import { useAsync, useLocalStorage } from 'react-use'; - -import { isLiveChannelMessageEvent, isLiveChannelStatusEvent, LiveChannelScope, SelectableValue } from '@grafana/data'; -import { getBackendSrv, getGrafanaLiveSrv, config } from '@grafana/runtime'; -import { - Button, - CodeEditor, - Collapse, - Field, - HorizontalGroup, - InlineField, - InlineFieldRow, - InlineSwitch, - Input, - LinkButton, - Select, - Switch, - Alert, -} from '@grafana/ui'; -import { Page } from 'app/core/components/Page/Page'; -import { useNavModel } from 'app/core/hooks/useNavModel'; -import { GrafanaRouteComponentProps } from 'app/core/navigation/types'; - -export const EXPORT_LOCAL_STORAGE_KEY = 'grafana.export.config'; - -interface ExportStatusMessage { - running: boolean; - target: string; - started: number; - finished: number; - update: number; - count: number; - current: number; - last: string; - status: string; -} - -interface ExportJob { - format: string; // 'git'; - generalFolderPath: string; - history: boolean; - exclude: Record; - - git?: {}; -} - -const defaultJob: ExportJob = { - format: 'git', - generalFolderPath: 'general', - history: true, - exclude: {}, - git: {}, -}; - -interface ExporterInfo { - key: string; - name: string; - description: string; - children?: ExporterInfo[]; -} - -enum StorageFormat { - Git = 'git', - EntityStore = 'entityStore', -} - -const formats: Array> = [ - { label: 'GIT', value: StorageFormat.Git, description: 'Exports a fresh git repository' }, - { label: 'Entity store', value: StorageFormat.EntityStore, description: 'Export to the SQL based entity store' }, -]; - -interface Props extends GrafanaRouteComponentProps {} - -const labelWith = 18; - -export default function ExportPage(props: Props) { - const navModel = useNavModel('export'); - const [status, setStatus] = useState(); - const [body, setBody] = useLocalStorage(EXPORT_LOCAL_STORAGE_KEY, defaultJob); - const [details, setDetails] = useState(false); - - const serverOptions = useAsync(() => { - return getBackendSrv().get<{ exporters: ExporterInfo[] }>('/api/admin/export/options'); - }, []); - - const doStart = () => { - 
getBackendSrv() - .post('/api/admin/export', body) - .then((v) => { - if (v.cfg && v.status.running) { - setBody(v.cfg); // saves the valid parsed body - } - }); - }; - - const doStop = () => { - getBackendSrv().post('/api/admin/export/stop'); - }; - - const setInclude = useCallback( - (k: string, v: boolean) => { - if (!serverOptions.value || !body) { - return; - } - const exclude: Record = {}; - if (k === '*') { - if (!v) { - for (let exp of serverOptions.value.exporters) { - exclude[exp.key] = true; - } - } - setBody({ ...body, exclude }); - return; - } - - for (let exp of serverOptions.value.exporters) { - let val = body.exclude?.[exp.key]; - if (k === exp.key) { - val = !v; - } - if (val) { - exclude[exp.key] = val; - } - } - setBody({ ...body, exclude }); - }, - [body, setBody, serverOptions] - ); - - useEffect(() => { - const subscription = getGrafanaLiveSrv() - .getStream({ - scope: LiveChannelScope.Grafana, - namespace: 'broadcast', - path: 'export', - }) - .subscribe({ - next: (evt) => { - if (isLiveChannelMessageEvent(evt)) { - setStatus(evt.message); - } else if (isLiveChannelStatusEvent(evt)) { - setStatus(evt.message); - } - }, - }); - - return () => { - subscription.unsubscribe(); - }; - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []); - - const renderView = () => { - const isEntityStoreEnabled = body?.format === StorageFormat.EntityStore && config.featureToggles.entityStore; - const shouldDisplayContent = isEntityStoreEnabled || body?.format === StorageFormat.Git; - - const statusFragment = status && ( -
{/* JSX tags were stripped from the rest of this deleted file in this capture; only the embedded expressions survive. This fragment showed a "Status" section with {JSON.stringify(status, null, 2)} and, while status.running, presumably a stop control wired to doStop */}
- );
-
- const formFragment = !Boolean(status?.running) && (
- {/* JSX tags stripped; the form included an Input bound to body.generalFolderPath (onChange: setBody({ ...body!, generalFolderPath: v.currentTarget.value }), placeholder="root folder path"), presumably the per-exporter include switches driven by setInclude and a start control wired to doStart (likely gated on shouldDisplayContent), plus a "Cancel" link */}
- );
-
- const requestDetailsFragment = (isEntityStoreEnabled || body?.format === StorageFormat.Git) && (
- {/* JSX tags stripped; a CodeEditor showed the request body, its onBlur handler calling setBody(JSON.parse(text)) ("force JSON?") */}
- );
-
- return (
- {/* wrapping layout stripped; the rendered view composed: */}
- {statusFragment}
- {formFragment}
- {requestDetailsFragment}
- );
- };
-
- return (
- {/* outer wrapper stripped; presumably the Page component with navModel, rendering: */}
- {renderView()}
- );
-}
diff --git a/public/app/routes/routes.tsx b/public/app/routes/routes.tsx index 7f429101645..20fff70cf77 100644 --- a/public/app/routes/routes.tsx +++ b/public/app/routes/routes.tsx @@ -366,13 +366,6 @@ export function getAppRoutes(): RouteDescriptor[] { () => import(/* webpackChunkName: "AdminEditOrgPage" */ 'app/features/admin/AdminEditOrgPage') ), }, - { - path: '/admin/storage/export', - roles: () => ['Admin'], - component: SafeDynamicImport( - () => import(/* webpackChunkName: "ExportPage" */ 'app/features/storage/ExportPage') - ), - }, { path: '/admin/storage/:path*', roles: () => ['Admin'],