diff --git a/.gitignore b/.gitignore index c9affe81a2afcdb69363796adf230ee1598bc808..b4eb12db183f39703c25bafc87369b19eeaadfd5 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,5 @@ engine/bin/ /engine/configs/ci_checker.yml /packer/example.com.key cloudformation/output/*.yaml + +engine/meta diff --git a/cloudformation/dle_cf_template.yaml b/cloudformation/dle_cf_template.yaml index b5acd3e3615467972e74302b2b87541ef1fb2c65..bef47c3472a15ec3cafedfa501b747c438322408 100644 --- a/cloudformation/dle_cf_template.yaml +++ b/cloudformation/dle_cf_template.yaml @@ -404,6 +404,10 @@ Resources: dle_config_path="/home/ubuntu/.dblab/engine/configs" dle_meta_path="/home/ubuntu/.dblab/engine/meta" postgres_conf_path="/home/ubuntu/.dblab/postgres_conf" + + # Create a special marker file to identify that the DLE is running as a "managed" instance + # (e.g., launched using Marketplace or Terraform), and has not yet been configured. + touch $dle_meta_path/pending.retrieval yq e -i ' .global.debug=${DLEDebugMode} | diff --git a/engine/.dockerignore b/engine/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..840be396e3e0fb5e497836a6ad875cecd3f325d7 --- /dev/null +++ b/engine/.dockerignore @@ -0,0 +1 @@ +meta/ \ No newline at end of file diff --git a/engine/.golangci.yml b/engine/.golangci.yml index 2b9a46df2c3672eed1df422a987c10a232ccb6eb..e43f4a37f4b5c88f3ecce8999493e5f98d6cab92 100644 --- a/engine/.golangci.yml +++ b/engine/.golangci.yml @@ -25,7 +25,7 @@ linters-settings: lang-version: "1.17" extra-rules: false gosimple: - go: "1.17" + go: "1.18" checks: [ "all" ] goimports: local-prefixes: gitlab.com/postgres-ai/database-lab diff --git a/engine/Makefile b/engine/Makefile index e76f75387ced3007eb085ea0721183a8beec8577..5e01b8312c0c3dc471cbcef1e42204c67342c041 100644 --- a/engine/Makefile +++ b/engine/Makefile @@ -5,6 +5,8 @@ RUN_CI_BINARY = run-ci CLI_BINARY = dblab GOARCH = amd64 +PWD= $(shell pwd) + COMMIT?=$(shell git rev-parse HEAD) 
BUILD_TIME?=$(shell date -u '+%Y%m%d-%H%M') VERSION=$(shell git describe --tags --match "v*" 2>/dev/null || echo "${COMMIT}") @@ -32,7 +34,7 @@ help: ## Display the help message all: clean build ## Build all binary components of the project install-lint: ## Install the linter to $GOPATH/bin which is expected to be in $PATH - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.45.0 + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.45.2 run-lint: ## Run linters golangci-lint run @@ -77,4 +79,20 @@ fmt: ## Format code clean: ## Remove compiled binaries from the local bin/ directory rm -f bin/* -.PHONY: help all build test run-lint install-lint lint fmt clean build-image build-dle build-ci-checker build-client build-ci-checker +run-dle: build-dle + docker run \ + --rm \ + --volume /tmp:/tmp \ + --volume /var/run/docker.sock:/var/run/docker.sock \ + --volume /var/lib/dblab:/var/lib/dblab/:rshared \ + --volume /var/lib/dblab/dblab_pool/dump:/var/lib/dblab/dblab_pool/dump \ + --volume /sys/kernel/debug:/sys/kernel/debug:rw \ + --volume /lib/modules:/lib/modules:ro \ + --volume $(PWD)/configs:/home/dblab/configs:ro \ + --volume $(PWD)/meta:/home/dblab/meta \ + --volume /proc:/host_proc:ro \ + --env DOCKER_API_VERSION=1.39 \ + -p "2345:2345" \ + dblab_server:local + +.PHONY: help all build test run-lint install-lint lint fmt clean build-image build-dle build-ci-checker build-client build-ci-checker run-dle diff --git a/engine/api/swagger-spec/dblab_server_swagger.yaml b/engine/api/swagger-spec/dblab_server_swagger.yaml index ef02d701e17c95683a304fb0857e4aa15991355c..b0c02316f0bf3a1220cea3d009374bf404edf3b2 100644 --- a/engine/api/swagger-spec/dblab_server_swagger.yaml +++ b/engine/api/swagger-spec/dblab_server_swagger.yaml @@ -486,6 +486,112 @@ paths: $ref: "#/definitions/Error" + /admin/config: + post: + tags: + - "config" + 
summary: "Set instance configuration" + description: "" + operationId: "setConfig" + consumes: + - "application/json" + produces: + - "application/json" + parameters: + - in: header + name: Verification-Token + type: string + required: true + - in: body + name: body + description: "Set configuration object" + required: true + schema: + $ref: '#/definitions/Config' + responses: + 200: + description: "Successful operation" + schema: + $ref: "#/definitions/Config" + 500: + description: "Internal server error" + schema: + $ref: "#/definitions/Error" + get: + tags: + - "config" + summary: "Get instance configuration" + description: "" + operationId: "getConfig" + produces: + - "application/json" + parameters: + - in: header + name: Verification-Token + type: string + required: true + responses: + 200: + description: "Successful operation" + schema: + $ref: "#/definitions/Config" + 500: + description: "Internal server error" + schema: + $ref: "#/definitions/Error" + + /admin/config.yaml: + get: + tags: + - "config" + summary: "Get the config of the instance" + description: "" + operationId: "getConfigYaml" + produces: + - "application/yaml" + parameters: + - in: header + name: Verification-Token + type: string + required: true + responses: + 200: + description: "Successful operation" + schema: + $ref: "#/definitions/Config" + 500: + description: "Internal server error" + schema: + $ref: "#/definitions/Error" + + /admin/test-db-source: + post: + tags: + - "config" + summary: "Test source database" + description: "" + operationId: "testDBConnection" + consumes: + - "application/json" + parameters: + - in: header + name: Verification-Token + type: string + required: true + - in: body + name: body + description: "Connection DB object" + required: true + schema: + $ref: '#/definitions/Connection' + responses: + 200: + description: "Successful operation" + 500: + description: "Internal server error" + schema: + $ref: "#/definitions/Error" + definitions: Instance: type: 
"object" @@ -878,6 +984,24 @@ definitions: hint: type: "string" + Config: + type: object + + Connection: + type: "object" + properties: + host: + type: "string" + port: + type: "string" + dbname: + type: "string" + username: + type: "string" + password: + type: "string" + + externalDocs: description: "Database Lab Docs" url: "https://gitlab.com/postgres-ai/docs/tree/master/docs/database-lab" diff --git a/engine/cmd/database-lab/main.go b/engine/cmd/database-lab/main.go index d12154c1ee1b9c66035e5161a55baed7331815b6..d4a63f2afac3a08b688e33d4538d9525ee780133 100644 --- a/engine/cmd/database-lab/main.go +++ b/engine/cmd/database-lab/main.go @@ -54,6 +54,8 @@ func main() { log.Fatal(errors.WithMessage(err, "failed to parse config")) } + config.ApplyGlobals(cfg) + docker, err := client.NewClientWithOpts(client.FromEnv) if err != nil { log.Fatal("Failed to create a Docker client:", err) @@ -163,12 +165,29 @@ func main() { }) embeddedUI := embeddedui.New(cfg.EmbeddedUI, engProps, runner, docker) - server := srv.NewServer(&cfg.Server, &cfg.Global, engProps, docker, cloningSvc, provisioner, retrievalSvc, platformSvc, - obs, est, pm, tm, tokenHolder, embeddedUI) - shutdownCh := setShutdownListener() logCleaner := diagnostic.NewLogCleaner() + reloadConfigFn := func(server *srv.Server) error { + return reloadConfig( + ctx, + provisioner, + tm, + retrievalSvc, + pm, + cloningSvc, + platformSvc, + est, + embeddedUI, + server, + logCleaner, + ) + } + + server := srv.NewServer(&cfg.Server, &cfg.Global, engProps, docker, cloningSvc, provisioner, retrievalSvc, platformSvc, + obs, est, pm, tm, tokenHolder, embeddedUI, reloadConfigFn) + shutdownCh := setShutdownListener() + go setReloadListener(ctx, provisioner, tm, retrievalSvc, pm, cloningSvc, platformSvc, est, embeddedUI, server, logCleaner) server.InitHandlers() @@ -256,6 +275,8 @@ func reloadConfig(ctx context.Context, provisionSvc *provision.Provisioner, tm * return err } + config.ApplyGlobals(cfg) + if err := 
provision.IsValidConfig(cfg.Provision); err != nil { return err } diff --git a/engine/configs/config.example.logical_generic.yml b/engine/configs/config.example.logical_generic.yml index b0d666788c4ce648cea07f3bcfc1e8265c87a0bf..a3381ca3c00d2c9628bb584a1d26f618f9534e44 100644 --- a/engine/configs/config.example.logical_generic.yml +++ b/engine/configs/config.example.logical_generic.yml @@ -219,7 +219,7 @@ retrieval: # Option for specifying the database list that must be copied. # By default, DLE dumps and restores all available databases. # Do not specify the databases section to take all databases. - # databases: + databases: # database1: # Options for a partial dump. # Do not specify the tables section to dump all available tables. diff --git a/engine/configs/config.example.logical_rds_iam.yml b/engine/configs/config.example.logical_rds_iam.yml index ef2c2469077a4342f8eb9feda2a6a999f92b1abd..c13b91173db5e3687be8b288ed6cd0333915cc35 100644 --- a/engine/configs/config.example.logical_rds_iam.yml +++ b/engine/configs/config.example.logical_rds_iam.yml @@ -221,7 +221,7 @@ retrieval: # Option for specifying the database list that must be copied. # By default, DLE dumps and restores all available databases. # Do not specify the databases section to take all databases. - # databases: + databases: # database1: # # Option for a partial dump. Do not specify the tables section to dump all available tables. 
# tables: diff --git a/engine/go.mod b/engine/go.mod index 6854cafe448f548ba89cf9e9dd198f30fa717cfa..de3c2b64a40b43fd13506e00572f252b7e5805ee 100644 --- a/engine/go.mod +++ b/engine/go.mod @@ -1,6 +1,6 @@ module gitlab.com/postgres-ai/database-lab/v3 -go 1.17 +go 1.18 require ( github.com/AlekSi/pointer v1.1.0 @@ -33,6 +33,7 @@ require ( golang.org/x/mod v0.5.1 golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f gopkg.in/yaml.v2 v2.4.0 + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b ) require ( @@ -77,7 +78,6 @@ require ( google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // indirect google.golang.org/grpc v1.43.0 // indirect google.golang.org/protobuf v1.27.1 // indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) // Include the single version of the dependency to clean up go.sum from old revisions. diff --git a/engine/internal/retrieval/engine/postgres/logical/dump.go b/engine/internal/retrieval/engine/postgres/logical/dump.go index 94876ba8f9979eab954dbd102498302e52d3b8ec..8c5256cea3bf192b094071a507e1a4fb03b3c0f0 100644 --- a/engine/internal/retrieval/engine/postgres/logical/dump.go +++ b/engine/internal/retrieval/engine/postgres/logical/dump.go @@ -397,7 +397,7 @@ func (d *DumpJob) Run(ctx context.Context) (err error) { return errors.Wrap(err, "failed to recalculate statistics after restore") } - if err := tools.RunCheckpoint(ctx, d.dockerClient, containerID, d.globalCfg.Database.User(), d.globalCfg.Database.DBName); err != nil { + if err := tools.RunCheckpoint(ctx, d.dockerClient, containerID, d.globalCfg.Database.User(), d.globalCfg.Database.Name()); err != nil { return errors.Wrap(err, "failed to run checkpoint before stop") } diff --git a/engine/internal/retrieval/engine/postgres/logical/restore.go b/engine/internal/retrieval/engine/postgres/logical/restore.go index f6e26b00e703388d7ad5ff4d8f402a64e5412e0a..eac25ac65c611beec9c6fe4712fa2aefcfa743eb 100644 --- 
a/engine/internal/retrieval/engine/postgres/logical/restore.go +++ b/engine/internal/retrieval/engine/postgres/logical/restore.go @@ -286,7 +286,7 @@ func (r *RestoreJob) Run(ctx context.Context) (err error) { return errors.Wrap(err, "failed to recalculate statistics after restore") } - if err := tools.RunCheckpoint(ctx, r.dockerClient, containerID, r.globalCfg.Database.User(), r.globalCfg.Database.DBName); err != nil { + if err := tools.RunCheckpoint(ctx, r.dockerClient, containerID, r.globalCfg.Database.User(), r.globalCfg.Database.Name()); err != nil { return errors.Wrap(err, "failed to run checkpoint before stop") } diff --git a/engine/internal/retrieval/engine/postgres/tools/cont/container.go b/engine/internal/retrieval/engine/postgres/tools/cont/container.go index 92b1c054294888d0b86d8deed08410a0956796f5..18eb52a5a0de87b748eae8e2c0ff64d79ea90f04 100644 --- a/engine/internal/retrieval/engine/postgres/tools/cont/container.go +++ b/engine/internal/retrieval/engine/postgres/tools/cont/container.go @@ -57,6 +57,8 @@ const ( DBLabRestoreLabel = "dblab_restore" // DBLabEmbeddedUILabel defines a label value for embedded UI containers. DBLabEmbeddedUILabel = "dblab_embedded_ui" + // DBLabFoundationLabel defines a label value to mark foundation containers. + DBLabFoundationLabel = "dblab_foundation" // DBLabRunner defines a label to mark runner containers. 
DBLabRunner = "dblab_runner" diff --git a/engine/internal/retrieval/engine/postgres/tools/db/image_content.go b/engine/internal/retrieval/engine/postgres/tools/db/image_content.go new file mode 100644 index 0000000000000000000000000000000000000000..845d31fe377500846f9d094011fde2a231ba357a --- /dev/null +++ b/engine/internal/retrieval/engine/postgres/tools/db/image_content.go @@ -0,0 +1,274 @@ +/* +2022 © Postgres.ai +*/ + +package db + +import ( + "context" + "encoding/json" + "fmt" + "os" + "os/signal" + "strings" + "time" + + "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/client" + "github.com/jackc/pgx/v4" + + dockerTools "gitlab.com/postgres-ai/database-lab/v3/internal/provision/docker" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools/cont" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools/health" + "gitlab.com/postgres-ai/database-lab/v3/pkg/config/global" + "gitlab.com/postgres-ai/database-lab/v3/pkg/log" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util/networks" +) + +const ( + extensionQuery = "select jsonb_object_agg(name, default_version) from pg_available_extensions" + + port = "5432" + username = "postgres" + dbname = "postgres" + password = "" + + foundationName = "dblab_foundation_" + + defaultRetries = 10 +) + +// ImageContent keeps the content lists from the foundation image. +type ImageContent struct { + engineProps global.EngineProps + isReady bool + extensions map[string]string + locales map[string]struct{} + databases map[string]struct{} +} + +// IsReady reports if the ImageContent has collected details about the current image. +func (i *ImageContent) IsReady() bool { + return i.isReady +} + +// NewImageContent creates a new ImageContent. 
+func NewImageContent(engineProps global.EngineProps) *ImageContent { + return &ImageContent{ + engineProps: engineProps, + extensions: make(map[string]string, 0), + locales: make(map[string]struct{}, 0), + databases: make(map[string]struct{}, 0), + } +} + +// Extensions provides list of Postgres extensions from the foundation image. +func (i *ImageContent) Extensions() map[string]string { + return i.extensions +} + +// Locales provides list of locales from the foundation image. +func (i *ImageContent) Locales() map[string]struct{} { + return i.locales +} + +// SetDatabases sets a list of databases mentioned in the Retrieval config. +// An empty list means all databases. +func (i *ImageContent) SetDatabases(dbList []string) { + if len(dbList) == 0 { + i.databases = make(map[string]struct{}, 0) + return + } + + for _, dbName := range dbList { + i.databases[dbName] = struct{}{} + } +} + +// Databases returns the list of databases mentioned in the Retrieval config. +// An empty list means all databases. +func (i *ImageContent) Databases() map[string]struct{} { + return i.databases +} + +// Collect collects extension and locale lists from the provided Docker image. 
+func (i *ImageContent) Collect(dockerImage string) error { + docker, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.39")) + if err != nil { + log.Fatal("Failed to create a Docker client:", err) + } + + ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt, os.Kill) + defer cancel() + + if err := i.collectImageContent(ctx, docker, dockerImage); err != nil { + return err + } + + i.isReady = true + + log.Msg("The image content has been successfully collected") + + return nil +} + +func getFoundationName(instanceID string) string { + return foundationName + instanceID +} + +func (i *ImageContent) collectImageContent(ctx context.Context, docker *client.Client, dockerImage string) error { + containerID, err := createContainer(ctx, docker, dockerImage, i.engineProps) + if err != nil { + return fmt.Errorf("failed to create a Docker container: %w", err) + } + + defer tools.RemoveContainer(ctx, docker, containerID, time.Millisecond) + + if err := i.collectExtensions(ctx, i.engineProps.InstanceID); err != nil { + return fmt.Errorf("failed to collect extensions from the image %s: %w", dockerImage, err) + } + + if err := i.collectLocales(ctx, docker, containerID); err != nil { + return fmt.Errorf("failed to collect locales: %w", err) + } + + return nil +} + +func (i *ImageContent) collectExtensions(ctx context.Context, instanceID string) error { + conn, err := pgx.Connect(ctx, ConnectionString(getFoundationName(instanceID), port, username, dbname, password)) + if err != nil { + return fmt.Errorf("failed to connect: %w", err) + } + + var row []byte + + if err = conn.QueryRow(ctx, extensionQuery).Scan(&row); err != nil { + return err + } + + extensionMap := map[string]string{} + + if err := json.Unmarshal(row, &extensionMap); err != nil { + return err + } + + i.extensions = extensionMap + + return nil +} + +func (i *ImageContent) collectLocales(ctx context.Context, docker *client.Client, containerID string) error { + out, err := 
getLocales(ctx, docker, containerID) + if err != nil { + return err + } + + imageLocales := map[string]struct{}{} + + for _, line := range strings.Split(out, "\n") { + if len(line) != 0 { + locale := strings.ReplaceAll(strings.ToLower(strings.TrimSpace(line)), "-", "") + imageLocales[locale] = struct{}{} + } + } + + i.locales = imageLocales + + return nil +} + +func createContainer(ctx context.Context, docker *client.Client, image string, props global.EngineProps) (string, error) { + if err := dockerTools.PrepareImage(ctx, docker, image); err != nil { + return "", fmt.Errorf("failed to prepare Docker image: %w", err) + } + + containerConf := &container.Config{ + Labels: map[string]string{ + cont.DBLabControlLabel: cont.DBLabFoundationLabel, + cont.DBLabInstanceIDLabel: props.InstanceID, + cont.DBLabEngineNameLabel: props.ContainerName, + }, + Env: []string{ + "POSTGRES_HOST_AUTH_METHOD=trust", + }, + Image: image, + Healthcheck: health.GetConfig(username, dbname, + health.OptionInterval(health.DefaultRestoreInterval), health.OptionRetries(defaultRetries)), + } + + containerName := getFoundationName(props.InstanceID) + + containerID, err := tools.CreateContainerIfMissing(ctx, docker, containerName, containerConf, &container.HostConfig{}) + if err != nil { + return "", fmt.Errorf("failed to create container %q %w", containerName, err) + } + + log.Msg(fmt.Sprintf("Running container: %s. 
ID: %v", containerName, containerID)) + + if err := docker.ContainerStart(ctx, containerID, types.ContainerStartOptions{}); err != nil { + return "", fmt.Errorf("failed to start container %q: %w", containerName, err) + } + + if err := tools.InitDB(ctx, docker, containerID); err != nil { + return "", fmt.Errorf("failed to init Postgres: %w", err) + } + + if err := resetHBA(ctx, docker, containerID); err != nil { + return "", fmt.Errorf("failed to init Postgres: %w", err) + } + + if err := tools.StartPostgres(ctx, docker, containerID, tools.DefaultStopTimeout); err != nil { + return "", fmt.Errorf("failed to init Postgres: %w", err) + } + + log.Dbg("Waiting for container readiness") + + if err := tools.CheckContainerReadiness(ctx, docker, containerID); err != nil { + return "", fmt.Errorf("failed to readiness check: %w", err) + } + + if err := networks.Connect(ctx, docker, props.InstanceID, containerID); err != nil { + return "", fmt.Errorf("failed to connect UI container to the internal Docker network: %w", err) + } + + return containerID, nil +} + +func resetHBA(ctx context.Context, dockerClient *client.Client, containerID string) error { + command := []string{"sh", "-c", `su postgres -c "echo 'hostnossl all all 0.0.0.0/0 trust' > ${PGDATA}/pg_hba.conf"`} + + log.Dbg("Reset pg_hba", command) + + out, err := tools.ExecCommandWithOutput(ctx, dockerClient, containerID, types.ExecConfig{ + Tty: true, + Cmd: command, + }) + + if err != nil { + return fmt.Errorf("failed to reset pg_hba.conf: %w", err) + } + + log.Dbg(out) + + return nil +} + +func getLocales(ctx context.Context, dockerClient *client.Client, containerID string) (string, error) { + command := []string{"sh", "-c", `locale -a`} + + log.Dbg("Get locale list", command) + + out, err := tools.ExecCommandWithOutput(ctx, dockerClient, containerID, types.ExecConfig{ + Tty: true, + Cmd: command, + }) + + if err != nil { + return "", fmt.Errorf("failed to get locale list: %w", err) + } + + return out, nil +} diff 
--git a/engine/internal/retrieval/engine/postgres/tools/db/pg.go b/engine/internal/retrieval/engine/postgres/tools/db/pg.go index a438003fa0b33a0a36447eac3ca1c23fee439d77..56763f3d35bd1b0e2f5734c18bed8a6338f8d154 100644 --- a/engine/internal/retrieval/engine/postgres/tools/db/pg.go +++ b/engine/internal/retrieval/engine/postgres/tools/db/pg.go @@ -6,10 +6,244 @@ package db import ( + "context" + "errors" "fmt" + "strings" + + "github.com/jackc/pgx/v4" + "golang.org/x/mod/semver" + + "gitlab.com/postgres-ai/database-lab/v3/pkg/log" + "gitlab.com/postgres-ai/database-lab/v3/pkg/models" ) // ConnectionString builds PostgreSQL connection string. func ConnectionString(host, port, username, dbname, password string) string { return fmt.Sprintf("host=%s port=%s user='%s' database='%s' password='%s'", host, port, username, dbname, password) } + +const ( + availableExtensions = "select name, default_version, coalesce(installed_version,'') from pg_available_extensions " + + "where installed_version is not null" + availableLocales = "select datname, lower(datcollate), lower(datctype) from pg_catalog.pg_database" +) + +type extension struct { + name string + defaultVersion string + installedVersion string +} + +type locale struct { + name string + collate string + ctype string +} + +// CheckSource checks the readiness of the source database to dump and restore processes. +func CheckSource(ctx context.Context, conf *models.ConnectionTest, imageContent *ImageContent) (*models.TestConnection, error) { + if !imageContent.IsReady() { + return &models.TestConnection{ + Status: models.TCStatusNotice, + Result: models.TCResultUnexploredImage, + Message: "Service has not collected data about the Docker image yet. 
Please try again later", + }, nil + } + + connStr := ConnectionString(conf.Host, conf.Port, conf.Username, conf.DBName, conf.Password) + + conn, err := pgx.Connect(ctx, connStr) + if err != nil { + log.Dbg("failed to test database connection:", err) + + return &models.TestConnection{ + Status: models.TCStatusError, + Result: models.TCResultConnectionError, + Message: err.Error(), + }, nil + } + + defer func() { + if err := conn.Close(ctx); err != nil { + log.Dbg("failed to close connection:", err) + } + }() + + var one int + + if err := conn.QueryRow(ctx, "select 1").Scan(&one); err != nil { + return &models.TestConnection{ + Status: models.TCStatusError, + Result: models.TCResultConnectionError, + Message: err.Error(), + }, nil + } + + if missing, unsupported, err := checkExtensions(ctx, conn, imageContent.Extensions()); err != nil { + return &models.TestConnection{ + Status: models.TCStatusWarning, + Result: models.TCResultMissingExtension, + Message: buildExtensionsWarningMessage(missing, unsupported), + }, nil + } + + if missing, err := checkLocales(ctx, conn, imageContent.Locales(), imageContent.Databases()); err != nil { + return &models.TestConnection{ + Status: models.TCStatusWarning, + Result: models.TCResultMissingLocale, + Message: buildLocalesWarningMessage(missing), + }, nil + } + + return &models.TestConnection{ + Status: models.TCStatusOK, + Result: models.TCResultOK, + Message: models.TCMessageOK, + }, nil +} + +func checkExtensions(ctx context.Context, conn *pgx.Conn, imageExtensions map[string]string) ([]extension, []extension, error) { + rows, err := conn.Query(ctx, availableExtensions) + if err != nil { + return nil, nil, err + } + + missingExtensions := []extension{} + unsupportedVersions := []extension{} + + for rows.Next() { + var ext extension + if err := rows.Scan(&ext.name, &ext.defaultVersion, &ext.installedVersion); err != nil { + return nil, nil, err + } + + imageExt, ok := imageExtensions[ext.name] + if !ok { + missingExtensions = 
append(missingExtensions, ext) + continue + } + + if !semver.IsValid(toCanonicalSemver(ext.defaultVersion)) { + unsupportedVersions = append(unsupportedVersions, ext) + continue + } + + if semver.Compare(toCanonicalSemver(imageExt), toCanonicalSemver(ext.defaultVersion)) == -1 { + unsupportedVersions = append(unsupportedVersions, ext) + } + } + + if err := rows.Err(); err != nil { + return nil, nil, err + } + + if len(missingExtensions) != 0 || len(unsupportedVersions) != 0 { + return missingExtensions, unsupportedVersions, errors.New("extension warning found") + } + + return nil, nil, nil +} + +func toCanonicalSemver(v string) string { + if v == "" { + return "" + } + + if v[0] != 'v' { + return "v" + v + } + + return v +} + +func buildExtensionsWarningMessage(missingExtensions, unsupportedVersions []extension) string { + sb := &strings.Builder{} + + if len(missingExtensions) > 0 { + sb.WriteString("There are missing extensions:") + + formatExtensionList(sb, missingExtensions) + + sb.WriteRune('\n') + } + + if len(unsupportedVersions) > 0 { + sb.WriteString("There are extensions with an unsupported version:") + + formatExtensionList(sb, unsupportedVersions) + } + + return sb.String() +} + +func formatExtensionList(sb *strings.Builder, extensions []extension) { + length := len(extensions) + + for i, missing := range extensions { + sb.WriteString(" " + missing.name + " " + missing.defaultVersion) + + if i != length-1 { + sb.WriteRune(',') + } + } +} + +func checkLocales(ctx context.Context, conn *pgx.Conn, imageLocales, databases map[string]struct{}) ([]locale, error) { + rows, err := conn.Query(ctx, availableLocales) + if err != nil { + return nil, err + } + + missingLocales := []locale{} + + for rows.Next() { + var l locale + if err := rows.Scan(&l.name, &l.collate, &l.ctype); err != nil { + return nil, err + } + + if _, ok := databases[l.name]; len(databases) > 0 && !ok { + // Skip the check if there is a list of restored databases, and it does not contain the 
current database. + continue + } + + cleanCollate := strings.ReplaceAll(strings.ToLower(l.collate), "-", "") + + if _, ok := imageLocales[cleanCollate]; !ok { + missingLocales = append(missingLocales, l) + continue + } + + cleanCtype := strings.ReplaceAll(strings.ToLower(l.ctype), "-", "") + + if _, ok := imageLocales[cleanCtype]; !ok { + missingLocales = append(missingLocales, l) + continue + } + } + + if len(missingLocales) != 0 { + return missingLocales, errors.New("locale warning found") + } + + return nil, nil +} + +func buildLocalesWarningMessage(missingLocales []locale) string { + sb := &strings.Builder{} + + if length := len(missingLocales); length > 0 { + sb.WriteString("There are missing locales:") + + for i, missing := range missingLocales { + sb.WriteString(fmt.Sprintf(" '%s' (collate: %s, ctype: %s)", missing.name, missing.collate, missing.ctype)) + + if i != length-1 { + sb.WriteRune(',') + } + } + } + + return sb.String() +} diff --git a/engine/internal/retrieval/engine/postgres/tools/fs/tools.go b/engine/internal/retrieval/engine/postgres/tools/fs/tools.go index 6f10b6429985c451fc810f3f6f4e28c93eda50f7..acc236d988b4e822e17decb4057a3217c750981f 100644 --- a/engine/internal/retrieval/engine/postgres/tools/fs/tools.go +++ b/engine/internal/retrieval/engine/postgres/tools/fs/tools.go @@ -26,7 +26,7 @@ func CopyDirectoryContent(sourceDir, dataDir string) error { sourcePath := filepath.Join(sourceDir, entry.Name()) destPath := filepath.Join(dataDir, entry.Name()) - if err := copyFile(sourcePath, destPath); err != nil { + if err := CopyFile(sourcePath, destPath); err != nil { return err } } @@ -34,7 +34,8 @@ func CopyDirectoryContent(sourceDir, dataDir string) error { return nil } -func copyFile(sourceFilename, destinationFilename string) error { +// CopyFile copies a file from one location to another. 
+func CopyFile(sourceFilename, destinationFilename string) error { dst, err := os.Create(destinationFilename) if err != nil { return err diff --git a/engine/internal/retrieval/engine/postgres/tools/tools.go b/engine/internal/retrieval/engine/postgres/tools/tools.go index f4feee8596343109d424c5f6bacd6f5c367c3258..8f32ea83ebca50fbb85eb42351e1944037fc1d12 100644 --- a/engine/internal/retrieval/engine/postgres/tools/tools.go +++ b/engine/internal/retrieval/engine/postgres/tools/tools.go @@ -192,12 +192,12 @@ func InitDB(ctx context.Context, dockerClient *client.Client, containerID string Cmd: initCommand, }) + log.Dbg(out) + if err != nil { return errors.Wrap(err, "failed to init Postgres") } - log.Dbg(out) - return nil } diff --git a/engine/internal/retrieval/retrieval.go b/engine/internal/retrieval/retrieval.go index 78b358f9ad0f4fe4c9e552795a65756aea94b264..1152ef73d071f85e2dc21f036397d1a1930ebd7e 100644 --- a/engine/internal/retrieval/retrieval.go +++ b/engine/internal/retrieval/retrieval.go @@ -8,6 +8,7 @@ package retrieval import ( "context" "fmt" + "os" "strings" "time" @@ -26,7 +27,10 @@ import ( "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/physical" "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/snapshot" "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools/cont" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools/db" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/options" "gitlab.com/postgres-ai/database-lab/v3/internal/telemetry" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util" dblabCfg "gitlab.com/postgres-ai/database-lab/v3/pkg/config" "gitlab.com/postgres-ai/database-lab/v3/pkg/config/global" @@ -35,8 +39,11 @@ import ( ) const ( + parseOption = cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow refreshJobs jobGroup = "refresh" snapshotJobs jobGroup = "snapshot" + + pendingFilename = "pending.retrieval" ) type 
jobGroup string @@ -45,6 +52,7 @@ type jobGroup string type Retrieval struct { Scheduler Scheduler State State + imageState *db.ImageContent cfg *config.Config global *global.Config engineProps global.EngineProps @@ -76,6 +84,7 @@ func New(cfg *dblabCfg.Config, engineProps global.EngineProps, docker *client.Cl Status: models.Inactive, alerts: make(map[models.AlertType]models.Alert), }, + imageState: db.NewImageContent(engineProps), } retrievalCfg, err := ValidateConfig(&cfg.Retrieval) @@ -85,9 +94,57 @@ func New(cfg *dblabCfg.Config, engineProps global.EngineProps, docker *client.Cl r.setup(retrievalCfg) + if err := checkPendingMarker(r); err != nil { + return nil, fmt.Errorf("failed to check pending marker: %w", err) + } + return r, nil } +// ImageContent provides the content of foundation Docker image. +func (r *Retrieval) ImageContent() *db.ImageContent { + return r.imageState +} + +func checkPendingMarker(r *Retrieval) error { + pendingPath, err := util.GetMetaPath(pendingFilename) + if err != nil { + return fmt.Errorf("failed to build pending filename: %w", err) + } + + if _, err := os.Stat(pendingPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil + } + + return fmt.Errorf("failed to get pending file info: %w", err) + } + + r.State.Status = models.Pending + + return nil +} + +// RemovePendingMarker removes the file from the metadata directory which specifies that retrieval is pending. +func (r *Retrieval) RemovePendingMarker() error { + pending, err := util.GetMetaPath(pendingFilename) + if err != nil { + return fmt.Errorf("failed to build pending filename: %w", err) + } + + if err := os.Remove(pending); err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil + } + + return err + } + + r.State.Status = models.Inactive + + return nil +} + // Reload reloads retrieval configuration. 
func (r *Retrieval) Reload(ctx context.Context, retrievalCfg *config.Config) { r.setup(retrievalCfg) @@ -109,7 +166,8 @@ func (r *Retrieval) reloadStatefulJobs() { log.Msg("Skip reloading of the stateful retrieval job. Spec not found", job.Name()) continue } - + // todo should we remove if jobs are not there ? + // todo should we check for completion before ? if err := job.Reload(cfg.Options); err != nil { log.Err("Failed to reload configuration of the retrieval job", job.Name(), err) } @@ -123,6 +181,10 @@ func (r *Retrieval) Run(ctx context.Context) error { log.Msg("Retrieval mode:", r.State.Mode) + if err := r.collectFoundationImageContent(); err != nil { + return fmt.Errorf("failed to collect content lists from the foundation Docker image of the logicalDump job: %w", err) + } + fsManager, err := r.getNextPoolToDataRetrieving() if err != nil { var skipError *SkipRefreshingError @@ -148,6 +210,12 @@ func (r *Retrieval) Run(ctx context.Context) error { log.Msg("Pool to perform data retrieving: ", fsManager.Pool().Name) + if r.State.Status == models.Pending { + log.Msg("Data retrieving suspended because Retrieval state is pending") + + return nil + } + if err := r.run(runCtx, fsManager); err != nil { alert := telemetry.Alert{Level: models.RefreshFailed, Message: fmt.Sprintf("Failed to perform initial data retrieving: %s", r.State.Mode)} @@ -162,6 +230,49 @@ func (r *Retrieval) Run(ctx context.Context) error { return nil } +func (r *Retrieval) collectFoundationImageContent() error { + if _, ok := r.cfg.JobsSpec[logical.DumpJobType]; !ok { + return nil + } + + dumpOptions := &logical.DumpOptions{} + + if err := r.JobConfig(logical.DumpJobType, &dumpOptions); err != nil { + return fmt.Errorf("failed to get config of %s job: %w", logical.DumpJobType, err) + } + + if err := r.imageState.Collect(dumpOptions.DockerImage); err != nil { + return err + } + + // Collect a list of databases mentioned in the Retrieval config. An empty list means all databases. 
+ dbs := make([]string, 0) + + if len(dumpOptions.Databases) != 0 { + dbs = append(dbs, collectDBList(dumpOptions.Databases)...) + + restoreOptions := &logical.RestoreOptions{} + + if err := r.JobConfig(logical.RestoreJobType, &restoreOptions); err == nil && len(restoreOptions.Databases) != 0 { + dbs = append(dbs, collectDBList(restoreOptions.Databases)...) + } + } + + r.imageState.SetDatabases(dbs) + + return nil +} + +func collectDBList(definitions map[string]logical.DumpDefinition) []string { + dbs := []string{} + + for dbName := range definitions { + dbs = append(dbs, dbName) + } + + return dbs +} + func (r *Retrieval) getNextPoolToDataRetrieving() (pool.FSManager, error) { firstPool := r.poolManager.First() if firstPool == nil { @@ -406,7 +517,7 @@ func (r *Retrieval) setupScheduler(ctx context.Context) { return } - specParser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow) + specParser := cron.NewParser(parseOption) spec, err := specParser.Parse(r.cfg.Refresh.Timetable) if err != nil { @@ -422,7 +533,7 @@ func (r *Retrieval) setupScheduler(ctx context.Context) { func (r *Retrieval) refreshFunc(ctx context.Context) func() { return func() { - if err := r.fullRefresh(ctx); err != nil { + if err := r.FullRefresh(ctx); err != nil { alert := telemetry.Alert{Level: models.RefreshFailed, Message: "Failed to run full-refresh"} r.State.addAlert(alert) r.tm.SendEvent(ctx, telemetry.AlertEvent, alert) @@ -431,8 +542,8 @@ func (r *Retrieval) refreshFunc(ctx context.Context) func() { } } -// fullRefresh performs full refresh for an unused storage pool and makes it active. -func (r *Retrieval) fullRefresh(ctx context.Context) error { +// FullRefresh performs full refresh for an unused storage pool and makes it active. 
+func (r *Retrieval) FullRefresh(ctx context.Context) error { if r.State.Status == models.Refreshing || r.State.Status == models.Snapshotting { alert := telemetry.Alert{ Level: models.RefreshSkipped, @@ -445,6 +556,12 @@ func (r *Retrieval) fullRefresh(ctx context.Context) error { return nil } + if r.State.Status == models.Pending { + log.Msg("Data retrieving suspended because Retrieval state is pending") + + return nil + } + // Stop previous runs and snapshot schedulers. if r.ctxCancel != nil { r.ctxCancel() @@ -542,3 +659,20 @@ func (r *Retrieval) ReportState() telemetry.Restore { Jobs: r.cfg.Jobs, } } + +// ErrStageNotFound means that the requested stage is not exist in the retrieval jobs config. +var ErrStageNotFound = errors.New("stage not found") + +// JobConfig parses job configuration to the provided structure. +func (r *Retrieval) JobConfig(stage string, jobCfg any) error { + stageSpec, ok := r.cfg.JobsSpec[stage] + if !ok { + return ErrStageNotFound + } + + if err := options.Unmarshal(stageSpec.Options, jobCfg); err != nil { + return fmt.Errorf("failed to unmarshal configuration options: %w", err) + } + + return nil +} diff --git a/engine/internal/retrieval/retrieval_test.go b/engine/internal/retrieval/retrieval_test.go index 99b70a6e37619c72ff6d40424f5861cfe086d889..92461080493b64fc6199da3aa52d6c4d8d6bf20e 100644 --- a/engine/internal/retrieval/retrieval_test.go +++ b/engine/internal/retrieval/retrieval_test.go @@ -1,9 +1,15 @@ package retrieval import ( + "os" + "path" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "gitlab.com/postgres-ai/database-lab/v3/pkg/models" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util" ) func TestJobGroup(t *testing.T) { @@ -41,3 +47,73 @@ func TestJobGroup(t *testing.T) { assert.Equal(t, tc.group, getJobGroup(tc.jobName)) } } + +func TestPendingMarker(t *testing.T) { + t.Run("check if the marker file affects the retrieval state", func(t *testing.T) { + pendingFilepath, err := 
util.GetMetaPath(pendingFilename) + require.Nil(t, err) + + tmpDir := path.Dir(pendingFilepath) + + err = os.MkdirAll(tmpDir, 0755) + require.Nil(t, err) + + defer func() { + err := os.RemoveAll(tmpDir) + require.Nil(t, err) + }() + + _, err = os.Create(pendingFilepath) + require.Nil(t, err) + + defer func() { + err := os.Remove(pendingFilepath) + require.Nil(t, err) + }() + + r := &Retrieval{} + + err = checkPendingMarker(r) + require.Nil(t, err) + assert.Equal(t, models.Pending, r.State.Status) + }) + + t.Run("check the deletion of the pending marker", func(t *testing.T) { + pendingFilepath, err := util.GetMetaPath(pendingFilename) + require.Nil(t, err) + + tmpDir := path.Dir(pendingFilepath) + + err = os.MkdirAll(tmpDir, 0755) + require.Nil(t, err) + + defer func() { + err := os.RemoveAll(tmpDir) + require.Nil(t, err) + }() + + _, err = os.Create(pendingFilepath) + require.Nil(t, err) + + defer func() { + err := os.Remove(pendingFilepath) + require.ErrorIs(t, err, os.ErrNotExist) + }() + + r := &Retrieval{ + State: State{ + Status: models.Pending, + }, + } + + err = r.RemovePendingMarker() + require.Nil(t, err) + assert.Equal(t, models.Inactive, r.State.Status) + + r.State.Status = models.Finished + + err = r.RemovePendingMarker() + require.Nil(t, err) + assert.Equal(t, models.Finished, r.State.Status) + }) +} diff --git a/engine/internal/retrieval/validator.go b/engine/internal/retrieval/validator.go index e5d6ca0f193ccb90055dc6dd3d0fa9a708c835a9..2371a884acc3b8184740cbc29d59b45a101c9906 100644 --- a/engine/internal/retrieval/validator.go +++ b/engine/internal/retrieval/validator.go @@ -5,6 +5,8 @@ import ( "fmt" "strings" + "github.com/robfig/cron/v3" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/config" "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/logical" "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/physical" @@ -18,6 +20,10 @@ func ValidateConfig(cfg *config.Config) (*config.Config, 
error) { return nil, err } + if err = validateRefreshTimetable(retrievalCfg); err != nil { + return nil, err + } + if err = validateStructure(retrievalCfg); err != nil { return nil, err } @@ -63,6 +69,21 @@ func validateStructure(r *config.Config) error { return nil } +func validateRefreshTimetable(r *config.Config) error { + if r.Refresh.Timetable == "" { + return nil + } + + specParser := cron.NewParser(parseOption) + + _, err := specParser.Parse(r.Refresh.Timetable) + if err != nil { + return fmt.Errorf("invalid timetable: %w", err) + } + + return nil +} + func hasLogicalJob(jobSpecs map[string]config.JobSpec) bool { if len(jobSpecs) == 0 { return false diff --git a/engine/internal/srv/api/util.go b/engine/internal/srv/api/util.go index 08aee6e603ba6f5f312d499564045568badf9f12..8116a8249c8c0fb1f66faa14e4c0312f54eb4e45 100644 --- a/engine/internal/srv/api/util.go +++ b/engine/internal/srv/api/util.go @@ -14,6 +14,12 @@ import ( "gitlab.com/postgres-ai/database-lab/v3/pkg/log" ) +// YamlContentType is the content type header for YAML. +const YamlContentType = "application/yaml; charset=utf-8" + +// JSONContentType is the content type header for JSON. +const JSONContentType = "application/json; charset=utf-8" + // WriteJSON responds with JSON. func WriteJSON(w http.ResponseWriter, httpStatusCode int, v interface{}) error { b, err := json.MarshalIndent(v, "", " ") @@ -21,7 +27,7 @@ func WriteJSON(w http.ResponseWriter, httpStatusCode int, v interface{}) error { return errors.Wrap(err, "failed to marshal response") } - w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.Header().Set("Content-Type", JSONContentType) w.WriteHeader(httpStatusCode) if _, err := w.Write(b); err != nil { @@ -49,7 +55,17 @@ func ReadJSON(r *http.Request, v interface{}) error { // WriteData responds with JSON. 
func WriteData(w http.ResponseWriter, httpStatusCode int, b []byte) error { - w.Header().Set("Content-Type", "application/json; charset=utf-8") + return WriteDataTyped(w, httpStatusCode, JSONContentType, b) +} + +// WriteDataTyped responds with data including content type. +func WriteDataTyped( + w http.ResponseWriter, + httpStatusCode int, + contentType string, + b []byte, +) error { + w.Header().Set("Content-Type", contentType) w.WriteHeader(httpStatusCode) if _, err := w.Write(b); err != nil { diff --git a/engine/internal/srv/config.go b/engine/internal/srv/config.go new file mode 100644 index 0000000000000000000000000000000000000000..83fda3d88bae40dfa5a4f4e45823492482428337 --- /dev/null +++ b/engine/internal/srv/config.go @@ -0,0 +1,338 @@ +package srv + +import ( + "bytes" + "context" + "fmt" + "net/http" + "time" + + "github.com/docker/docker/api/types" + yamlv2 "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" + + "gitlab.com/postgres-ai/database-lab/v3/internal/provision" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval" + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools/db" + "gitlab.com/postgres-ai/database-lab/v3/internal/srv/api" + "gitlab.com/postgres-ai/database-lab/v3/pkg/config" + "gitlab.com/postgres-ai/database-lab/v3/pkg/log" + "gitlab.com/postgres-ai/database-lab/v3/pkg/models" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util/projection" + yamlUtils "gitlab.com/postgres-ai/database-lab/v3/pkg/util/yaml" +) + +const connectionCheckTimeout = 10 * time.Second + +func (s *Server) getProjectedAdminConfig(w http.ResponseWriter, r *http.Request) { + cfg, err := s.projectedAdminConfig() + if err != nil { + api.SendError(w, r, err) + return + } + + if err := api.WriteJSON(w, http.StatusOK, cfg); err != nil { + api.SendError(w, r, err) + return + } +} + +func (s *Server) getAdminConfigYaml(w http.ResponseWriter, r *http.Request) { + cfg, err := adminConfigYaml() + if err != nil { + api.SendError(w, r, err) + return + } 
+ + if err := api.WriteDataTyped( + w, + http.StatusOK, + api.YamlContentType, + cfg, + ); err != nil { + api.SendError(w, r, err) + return + } +} + +func (s *Server) setProjectedAdminConfig(w http.ResponseWriter, r *http.Request) { + var cfg interface{} + if err := api.ReadJSON(r, &cfg); err != nil { + api.SendBadRequestError(w, r, err.Error()) + return + } + + applied, err := s.applyProjectedAdminConfig(r.Context(), cfg) + if err != nil { + api.SendBadRequestError(w, r, err.Error()) + return + } + + retrievalStatus := s.Retrieval.State.Status + + if err := s.Retrieval.RemovePendingMarker(); err != nil { + api.SendError(w, r, err) + return + } + + if retrievalStatus == models.Pending { + go func() { + if err := s.Retrieval.FullRefresh(context.Background()); err != nil { + log.Err(fmt.Errorf("failed to refresh data: %w", err)) + } + }() + } + + if err := api.WriteJSON(w, http.StatusOK, applied); err != nil { + api.SendError(w, r, err) + return + } +} + +func (s *Server) testDBSource(w http.ResponseWriter, r *http.Request) { + if s.Retrieval.State.Mode != models.Logical { + api.SendBadRequestError(w, r, "the endpoint is only available in the Logical mode of the data retrieval") + return + } + + var connection models.ConnectionTest + if err := api.ReadJSON(r, &connection); err != nil { + api.SendBadRequestError(w, r, err.Error()) + return + } + + if err := connectionPassword(&connection); err != nil { + api.SendBadRequestError(w, r, err.Error()) + return + } + + ctx, cancel := context.WithTimeout(r.Context(), connectionCheckTimeout) + defer cancel() + + tc, err := db.CheckSource(ctx, &connection, s.Retrieval.ImageContent()) + if err != nil { + api.SendBadRequestError(w, r, err.Error()) + return + } + + if err := api.WriteJSON(w, http.StatusOK, tc); err != nil { + api.SendError(w, r, err) + return + } +} + +func connectionPassword(connection *models.ConnectionTest) error { + if connection.Password != "" { + return nil + } + + proj := &models.ConfigProjection{} + + 
data, err := config.GetConfigBytes() + if err != nil { + return fmt.Errorf("failed to get config: %w", err) + } + + node := &yaml.Node{} + + if err = yaml.Unmarshal(data, node); err != nil { + return fmt.Errorf("failed to unmarshal config: %w", err) + } + + if err = projection.LoadYaml(proj, node, projection.LoadOptions{ + Groups: []string{"sensitive"}, + }); err != nil { + return fmt.Errorf("failed to load config projection: %w", err) + } + + if proj.Password != nil { + connection.Password = *proj.Password + } + + return nil +} + +func adminConfigYaml() ([]byte, error) { + data, err := config.GetConfigBytes() + if err != nil { + return nil, err + } + + document := &yaml.Node{} + + err = yaml.Unmarshal(data, document) + if err != nil { + return nil, err + } + + yamlUtils.DefaultConfigMask().Yaml(document) + + doc, err := yaml.Marshal(document) + if err != nil { + return nil, err + } + + return doc, nil +} + +func (s *Server) projectedAdminConfig() (interface{}, error) { + if s.Retrieval.State.Mode != models.Logical { + return nil, fmt.Errorf("config is only available in logical mode") + } + + data, err := config.GetConfigBytes() + if err != nil { + return nil, err + } + + document := &yaml.Node{} + + err = yaml.Unmarshal(data, document) + if err != nil { + return nil, err + } + + proj := &models.ConfigProjection{} + + err = projection.LoadYaml(proj, document, projection.LoadOptions{ + Groups: []string{"default"}, + }) + if err != nil { + return nil, fmt.Errorf("failed to load yaml config projection: %w", err) + } + + obj := map[string]interface{}{} + + err = projection.StoreJSON(proj, obj, projection.StoreOptions{ + Groups: []string{"default"}, + }) + if err != nil { + return nil, fmt.Errorf("failed to jsonify config projection: %w", err) + } + + return obj, nil +} + +func (s *Server) applyProjectedAdminConfig(ctx context.Context, obj interface{}) (interface{}, error) { + if s.Retrieval.State.Mode != models.Logical { + return nil, fmt.Errorf("config is only 
available in logical mode") + } + + objMap, ok := obj.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("config must be an object: %T", obj) + } + + proj := &models.ConfigProjection{} + + err := projection.LoadJSON(proj, objMap, projection.LoadOptions{ + Groups: []string{"default", "sensitive"}, + }) + if err != nil { + return nil, fmt.Errorf("failed to load json config projection: %w", err) + } + + if proj.Password != nil && *proj.Password == "" { + proj.Password = nil // Avoid storing empty password + } + + data, err := config.GetConfigBytes() + if err != nil { + return nil, err + } + + node := &yaml.Node{} + + err = yaml.Unmarshal(data, node) + if err != nil { + return nil, err + } + + err = projection.StoreYaml(proj, node, projection.StoreOptions{ + Groups: []string{"default", "sensitive"}, + }) + if err != nil { + return nil, fmt.Errorf("failed to prepare yaml config projection: %w", err) + } + + cfgData, err := yaml.Marshal(node) + if err != nil { + return nil, err + } + + if !bytes.Equal(cfgData, data) { + log.Msg("Config changed, validating...") + + err = s.validateConfig(ctx, proj, cfgData) + if err != nil { + return nil, err + } + + log.Msg("Backing up config...") + + err = config.RotateConfig(cfgData) + if err != nil { + log.Errf("Failed to backup config: %v", err) + return nil, err + } + + log.Msg("Config backed up successfully") + log.Msg("Reloading configuration...") + + err = s.reloadFn(s) + if err != nil { + log.Msg("Failed to reload configuration", err) + return nil, err + } + + log.Msg("Configuration reloaded") + } else { + log.Msg("No changes detected in the config, skipping backup and reload") + } + + result, err := s.projectedAdminConfig() + if err != nil { + return nil, err + } + + return result, nil +} + +func (s *Server) validateConfig( + ctx context.Context, + proj *models.ConfigProjection, + nodeBytes []byte, +) error { + cfg := &config.Config{} + + // yamlv2 is used because v3 returns an error when config is deserialized + err 
:= yamlv2.Unmarshal(nodeBytes, cfg) + if err != nil { + return err + } + + // Validating unmarshalled config is better because it represents actual usage + err = provision.IsValidConfig(cfg.Provision) + if err != nil { + return err + } + + _, err = retrieval.ValidateConfig(&cfg.Retrieval) + if err != nil { + return err + } + + if proj.DockerImage != nil { + stream, err := s.docker.ImagePull(ctx, *proj.DockerImage, types.ImagePullOptions{}) + if err != nil { + return err + } + + err = stream.Close() + if err != nil { + log.Err(err) + } + } + + return nil +} diff --git a/engine/internal/srv/routes.go b/engine/internal/srv/routes.go index 42bf91a4d770915ee940049ff44cc9be3dc82274..33c78d4a9602dabb3de5aeea1c49cba2021a06fe 100644 --- a/engine/internal/srv/routes.go +++ b/engine/internal/srv/routes.go @@ -595,7 +595,7 @@ func (s *Server) downloadArtifact(w http.ResponseWriter, r *http.Request) { // healthCheck provides a health check handler. func (s *Server) healthCheck(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.Header().Set("Content-Type", api.JSONContentType) healthResponse := models.Engine{ Version: version.GetVersion(), diff --git a/engine/internal/srv/server.go b/engine/internal/srv/server.go index 0f1fa56d3f78431711275451d84ca088b7f04e2d..e0473cc015685d68bd3c416d3b789c36e023d24f 100644 --- a/engine/internal/srv/server.go +++ b/engine/internal/srv/server.go @@ -62,6 +62,7 @@ type Server struct { tm *telemetry.Agent startedAt *models.LocalTime re *regexp.Regexp + reloadFn func(server *Server) error } // WSService defines a service to manage web-sockets. @@ -72,19 +73,11 @@ type WSService struct { } // NewServer initializes a new Server instance with provided configuration. 
-func NewServer(cfg *srvCfg.Config, globalCfg *global.Config, - engineProps global.EngineProps, - dockerClient *client.Client, - cloning *cloning.Base, - provisioner *provision.Provisioner, - retrievalSvc *retrieval.Retrieval, - platform *platform.Service, - observer *observer.Observer, - estimator *estimator.Estimator, - pm *pool.Manager, - tm *telemetry.Agent, - tokenKeeper *ws.TokenKeeper, - uiManager *embeddedui.UIManager) *Server { +func NewServer(cfg *srvCfg.Config, globalCfg *global.Config, engineProps global.EngineProps, + dockerClient *client.Client, cloning *cloning.Base, provisioner *provision.Provisioner, + retrievalSvc *retrieval.Retrieval, platform *platform.Service, observer *observer.Observer, + estimator *estimator.Estimator, pm *pool.Manager, tm *telemetry.Agent, tokenKeeper *ws.TokenKeeper, + uiManager *embeddedui.UIManager, reloadConfigFn func(server *Server) error) *Server { server := &Server{ Config: cfg, Global: globalCfg, @@ -104,6 +97,7 @@ func NewServer(cfg *srvCfg.Config, globalCfg *global.Config, pm: pm, tm: tm, startedAt: &models.LocalTime{Time: time.Now().Truncate(time.Second)}, + reloadFn: reloadConfigFn, } server.initLogRegExp() @@ -209,6 +203,10 @@ func (s *Server) InitHandlers() { adminR := r.PathPrefix("/admin").Subrouter() adminR.Use(authMW.AdminMW) adminR.HandleFunc("/ws-auth", s.websocketAuth).Methods(http.MethodGet) + adminR.HandleFunc("/config", s.getProjectedAdminConfig).Methods(http.MethodGet) + adminR.HandleFunc("/config.yaml", s.getAdminConfigYaml).Methods(http.MethodGet) + adminR.HandleFunc("/config", s.setProjectedAdminConfig).Methods(http.MethodPost) + adminR.HandleFunc("/test-db-source", s.testDBSource).Methods(http.MethodPost) r.HandleFunc("/instance/logs", authMW.WebSocketsMW(s.wsService.tokenKeeper, s.instanceLogs)) diff --git a/engine/pkg/config/config.go b/engine/pkg/config/config.go index 9c3d9083c0ff58a39aa9bf9d23796de695210f7f..93e0c60056fcfbc16dda2696922d5f8972d05f52 100644 --- a/engine/pkg/config/config.go 
+++ b/engine/pkg/config/config.go @@ -6,14 +6,6 @@ package config import ( - "fmt" - "os" - "path" - - "github.com/pkg/errors" - "github.com/rs/xid" - "gopkg.in/yaml.v2" - "gitlab.com/postgres-ai/database-lab/v3/internal/cloning" "gitlab.com/postgres-ai/database-lab/v3/internal/diagnostic" "gitlab.com/postgres-ai/database-lab/v3/internal/embeddedui" @@ -25,8 +17,6 @@ import ( retConfig "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/config" srvCfg "gitlab.com/postgres-ai/database-lab/v3/internal/srv/config" "gitlab.com/postgres-ai/database-lab/v3/pkg/config/global" - "gitlab.com/postgres-ai/database-lab/v3/pkg/log" - "gitlab.com/postgres-ai/database-lab/v3/pkg/util" ) const ( @@ -48,66 +38,3 @@ type Config struct { EmbeddedUI embeddedui.Config `yaml:"embeddedUI"` Diagnostic diagnostic.Config `yaml:"diagnostic"` } - -// LoadConfiguration instances a new application configuration. -func LoadConfiguration() (*Config, error) { - cfg, err := readConfig() - if err != nil { - return nil, errors.Wrap(err, "failed to parse config") - } - - log.SetDebug(cfg.Global.Debug) - log.Dbg("Config loaded", cfg) - - return cfg, nil -} - -// LoadInstanceID tries to make instance ID persistent across runs and load its value after restart -func LoadInstanceID() (string, error) { - instanceID := "" - - idFilepath, err := util.GetMetaPath(instanceIDFile) - if err != nil { - return "", fmt.Errorf("failed to get path of the instanceID file: %w", err) - } - - data, err := os.ReadFile(idFilepath) - if err != nil { - if os.IsNotExist(err) { - instanceID = xid.New().String() - log.Dbg("no instance_id file was found, generate new instance ID", instanceID) - - if err := os.MkdirAll(path.Dir(idFilepath), 0744); err != nil { - return "", fmt.Errorf("failed to make directory meta: %w", err) - } - - return instanceID, os.WriteFile(idFilepath, []byte(instanceID), 0644) - } - - return instanceID, fmt.Errorf("failed to load instanceid, %w", err) - } - - instanceID = string(data) - - return 
instanceID, nil -} - -// readConfig reads application configuration. -func readConfig() (*Config, error) { - configPath, err := util.GetConfigPath(configName) - if err != nil { - return nil, errors.Wrap(err, "failed to get config path") - } - - b, err := os.ReadFile(configPath) - if err != nil { - return nil, errors.Errorf("error loading %s config file", configPath) - } - - cfg := &Config{} - if err := yaml.Unmarshal(b, cfg); err != nil { - return nil, errors.WithMessagef(err, "error parsing %s config", configPath) - } - - return cfg, nil -} diff --git a/engine/pkg/config/loaders.go b/engine/pkg/config/loaders.go new file mode 100644 index 0000000000000000000000000000000000000000..2453b57736aabc7bf5ffb061e592a935840162b1 --- /dev/null +++ b/engine/pkg/config/loaders.go @@ -0,0 +1,123 @@ +package config + +import ( + "fmt" + "os" + "path" + + "github.com/pkg/errors" + "github.com/rs/xid" + "gopkg.in/yaml.v2" + + "gitlab.com/postgres-ai/database-lab/v3/pkg/log" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util/backup" +) + +const numberOfBackups = 10 + +// LoadConfiguration instances a new application configuration. +func LoadConfiguration() (*Config, error) { + cfg, err := readConfig() + if err != nil { + return nil, errors.Wrap(err, "failed to parse config") + } + + return cfg, nil +} + +// ApplyGlobals applies global configuration to logger. 
+func ApplyGlobals(cfg *Config) { + log.SetDebug(cfg.Global.Debug) + log.Dbg("Config loaded", cfg) +} + +// LoadInstanceID tries to make instance ID persistent across runs and load its value after restart +func LoadInstanceID() (string, error) { + instanceID := "" + + idFilepath, err := util.GetMetaPath(instanceIDFile) + if err != nil { + return "", fmt.Errorf("failed to get path of instanceID file: %w", err) + } + + data, err := os.ReadFile(idFilepath) + if err != nil { + if os.IsNotExist(err) { + instanceID = xid.New().String() + log.Dbg("no instance_id file was found, generate new instance ID", instanceID) + + if err := os.MkdirAll(path.Dir(idFilepath), 0744); err != nil { + return "", fmt.Errorf("failed to make directory meta: %w", err) + } + + return instanceID, os.WriteFile(idFilepath, []byte(instanceID), 0644) + } + + return instanceID, fmt.Errorf("failed to load instanceid, %w", err) + } + + instanceID = string(data) + + return instanceID, nil +} + +// readConfig reads application configuration. +func readConfig() (*Config, error) { + configPath, err := util.GetConfigPath(configName) + if err != nil { + return nil, errors.Wrap(err, "failed to get config path") + } + + b, err := os.ReadFile(configPath) + if err != nil { + return nil, errors.Errorf("error loading %s config file", configPath) + } + + cfg := &Config{} + if err := yaml.Unmarshal(b, cfg); err != nil { + return nil, errors.WithMessagef(err, "error parsing %s config", configPath) + } + + return cfg, nil +} + +// GetConfigBytes returns config bytes. 
+func GetConfigBytes() ([]byte, error) { + configPath, err := util.GetConfigPath(configName) + if err != nil { + return nil, errors.Wrap(err, "failed to get config path") + } + + b, err := os.ReadFile(configPath) + if err != nil { + return nil, errors.Errorf("error loading %s config file", configPath) + } + + return b, nil +} + +// RotateConfig store data in config, and backup old config +func RotateConfig(data []byte) error { + configPath, err := util.GetConfigPath(configName) + if err != nil { + return errors.Wrap(err, "failed to get config path") + } + + backups, err := backup.NewBackupCollection(configPath) + if err != nil { + return errors.Wrap(err, "failed to create backup collection") + } + + err = backups.Rotate(data) + if err != nil { + return errors.Wrap(err, "failed to rotate config") + } + + err = backups.EnsureMaxBackups(numberOfBackups) + if err != nil { + return errors.Wrap(err, "failed to ensure max backups") + } + + return nil +} diff --git a/engine/pkg/models/admin.go b/engine/pkg/models/admin.go new file mode 100644 index 0000000000000000000000000000000000000000..101dd0259b105c5497f7524c35cd1a62ae9d6713 --- /dev/null +++ b/engine/pkg/models/admin.go @@ -0,0 +1,40 @@ +package models + +const ( + // TCStatusOK defines the status code OK of the test connection request. + TCStatusOK = "ok" + + // TCStatusNotice defines the status code "notice" of the test connection request. + TCStatusNotice = "notice" + + // TCStatusWarning defines the status code "warning" of the test connection request. + TCStatusWarning = "warning" + + // TCStatusError defines the status code "error" of the test connection request. + TCStatusError = "error" + + // TCResultOK defines the result without errors of the test connection request. + TCResultOK = "ok" + + // TCResultConnectionError defines a connection error of the test connection request. + TCResultConnectionError = "connection_error" + + // TCResultUnexploredImage defines the notice about unexplored Docker image yet. 
+ TCResultUnexploredImage = "unexplored_image" + + // TCResultMissingExtension defines the warning about a missing extension. + TCResultMissingExtension = "missing_extension" + + // TCResultMissingLocale defines the warning about a missing locale. + TCResultMissingLocale = "missing_locale" + + // TCMessageOK defines the source database is ready for dump and restore. + TCMessageOK = "Database ready for dump and restore" +) + +// TestConnection represents the response of the test connection request. +type TestConnection struct { + Status string `json:"status"` + Result string `json:"result"` + Message string `json:"message"` +} diff --git a/engine/pkg/models/configuration.go b/engine/pkg/models/configuration.go new file mode 100644 index 0000000000000000000000000000000000000000..1ec5cde13c17fef3a311ad7ff967ddf799caa8e6 --- /dev/null +++ b/engine/pkg/models/configuration.go @@ -0,0 +1,27 @@ +package models + +// ConnectionTest defines a connection test model. +type ConnectionTest struct { + Host string `json:"host"` + Port string `json:"port"` + DBName string `json:"dbname"` + Username string `json:"username"` + Password string `json:"password"` +} + +// ConfigProjection is a projection of the configuration. 
+type ConfigProjection struct { + Debug *bool `proj:"global.debug"` + SharedBuffers *string `proj:"databaseConfigs.configs.shared_buffers"` + SharedPreloadLibraries *string `proj:"databaseConfigs.configs.shared_preload_libraries"` + DockerImage *string `proj:"databaseContainer.dockerImage"` + Timetable *string `proj:"retrieval.refresh.timetable"` + DBName *string `proj:"retrieval.spec.logicalDump.options.source.connection.dbname"` + Host *string `proj:"retrieval.spec.logicalDump.options.source.connection.host"` + Password *string `proj:"retrieval.spec.logicalDump.options.source.connection.password" groups:"sensitive"` + Port *int64 `proj:"retrieval.spec.logicalDump.options.source.connection.port"` + Username *string `proj:"retrieval.spec.logicalDump.options.source.connection.username"` + DBList map[string]interface{} `proj:"retrieval.spec.logicalDump.options.databases"` + DumpParallelJobs *int64 `proj:"retrieval.spec.logicalDump.options.parallelJobs"` + RestoreParallelJobs *int64 `proj:"retrieval.spec.logicalRestore.options.parallelJobs"` +} diff --git a/engine/pkg/models/retrieval.go b/engine/pkg/models/retrieval.go index b528a1f4cda4e3cbe8df344064040b9e363f3747..86db8c04fc5f41de984831f8a79564c96132eb1c 100644 --- a/engine/pkg/models/retrieval.go +++ b/engine/pkg/models/retrieval.go @@ -26,6 +26,8 @@ type RetrievalStatus string const ( // Inactive defines status when data retrieving is disabled. Inactive RetrievalStatus = "inactive" + // Pending defines status when data retrieving is pending configuration verification. + Pending RetrievalStatus = "pending" // Failed defines status when data retrieving is failed. Failed RetrievalStatus = "failed" // Refreshing defines status when data retrieving is in progress. 
diff --git a/engine/pkg/util/backup/backup.go b/engine/pkg/util/backup/backup.go new file mode 100644 index 0000000000000000000000000000000000000000..9ac29e79cb482b873c539c50b35162c4aba063ee --- /dev/null +++ b/engine/pkg/util/backup/backup.go @@ -0,0 +1,19 @@ +// Package backup utilities to back up and restore data +package backup + +import ( + "time" +) + +const ( + backupFileExtension = ".bak" +) + +type backup struct { + Filename string + Time time.Time +} + +var now = func() time.Time { + return time.Now().In(time.UTC) +} diff --git a/engine/pkg/util/backup/backup_test.go b/engine/pkg/util/backup/backup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..110d515935453232dd8595201bcb3e3e37d150a5 --- /dev/null +++ b/engine/pkg/util/backup/backup_test.go @@ -0,0 +1,65 @@ +package backup + +import ( + "os" + "path" + "path/filepath" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestBackupTimestamps(t *testing.T) { + now = mockNow() + r := require.New(t) + tmp := os.TempDir() + + configFile := path.Join(tmp, "backed.yaml") + cleanup, err := filepath.Glob(configFile + "*") + r.NoError(err) + for _, filename := range cleanup { + err := os.Remove(filename) + r.NoError(err) + } + + err = os.WriteFile(configFile, []byte(""), 0644) + r.NoError(err) + + backups, err := NewBackupCollection(configFile) + r.NoError(err) + r.Len(backups.backups, 0) + + err = backups.Backup() + backup1 := backups.backups[0] + r.NoError(err) + r.NotNil(backup1) + + err = backups.Backup() + backup2 := backups.backups[1] + r.NoError(err) + r.NotNil(backup2) + + err = backups.Backup() + backup3 := backups.backups[2] + r.NoError(err) + r.NotNil(backup3) + + err = backups.EnsureMaxBackups(2) + r.NoError(err) + + backups, err = NewBackupCollection(configFile) + r.NoError(err) + r.Len(backups.backups, 2) + + r.EqualValues(backup2.Filename, backups.backups[0].Filename) + r.EqualValues(backup3.Filename, backups.backups[1].Filename) +} + +func 
mockNow() func() time.Time { + seed := time.Now() + return func() time.Time { + seed = seed.Add(time.Second) + return seed + } +} diff --git a/engine/pkg/util/backup/collection.go b/engine/pkg/util/backup/collection.go new file mode 100644 index 0000000000000000000000000000000000000000..fed21dcc67086662aba1c5d8995589ee72673cfe --- /dev/null +++ b/engine/pkg/util/backup/collection.go @@ -0,0 +1,137 @@ +package backup + +import ( + "fmt" + iofs "io/fs" + "os" + "path/filepath" + "sort" + + "gitlab.com/postgres-ai/database-lab/v3/internal/retrieval/engine/postgres/tools/fs" + "gitlab.com/postgres-ai/database-lab/v3/pkg/util" +) + +// Collection represents a collection of backups. +type Collection struct { + Filename string + backups []*backup + perm iofs.FileMode +} + +// NewBackupCollection finds a collection of backups. +func NewBackupCollection(filename string) (*Collection, error) { + filename = filepath.Clean(filename) + + stat, err := os.Stat(filename) + if err != nil { + return nil, fmt.Errorf("failed to stat file: %w", err) + } + + if stat.IsDir() { + return nil, fmt.Errorf("file is a directory") + } + + files, err := filepath.Glob(filename + "*") + if err != nil { + return nil, fmt.Errorf("failed to glob file: %w", err) + } + + backups := make([]*backup, 0, len(files)-1) + + for _, filePath := range files { + if filepath.Ext(filePath) != backupFileExtension { + continue + } + + timestamp, err := getFileTimestamp(filePath) + if err != nil { + continue + } + + backup := &backup{ + Filename: filePath, + Time: timestamp, + } + backups = append(backups, backup) + } + + c := &Collection{ + Filename: filename, + backups: backups, + perm: stat.Mode(), + } + + c.sort() + + return c, nil +} + +// Rotate rotates the backups. 
+func (c *Collection) Rotate(content []byte) error {
+	err := c.Backup()
+	if err != nil {
+		return fmt.Errorf("failed to backup: %w", err)
+	}
+
+	err = os.WriteFile(c.Filename, content, c.perm)
+	if err != nil {
+		return fmt.Errorf("failed to write file: %w", err)
+	}
+
+	return nil
+}
+
+// Backup creates a timestamped backup copy of the file.
+func (c *Collection) Backup() error {
+	nowTime := now()
+
+	last := &backup{
+		Filename: c.Filename + "." +
+			nowTime.Format(util.DataStateAtFormat) +
+			backupFileExtension,
+		Time: nowTime,
+	}
+
+	err := fs.CopyFile(c.Filename, last.Filename)
+	if err != nil {
+		return fmt.Errorf("failed to copy file: %w", err)
+	}
+
+	c.backups = append(c.backups, last)
+	c.sort()
+
+	return nil
+}
+
+// EnsureMaxBackups keeps at most count newest backups, removing older backup files.
+func (c *Collection) EnsureMaxBackups(count int) error {
+	if count < 0 {
+		return fmt.Errorf("count must be non-negative")
+	}
+
+	c.sort()
+	backupsCount := len(c.backups)
+	removeCount := backupsCount - count
+
+	if removeCount <= 0 {
+		return nil
+	}
+
+	for i := 0; i < removeCount; i++ {
+		err := os.Remove(c.backups[i].Filename)
+		if err != nil {
+			return fmt.Errorf("failed to remove file: %w", err)
+		}
+	}
+
+	remaining := c.backups[removeCount:] // snapshot BEFORE reallocating c.backups
+	c.backups = append(make([]*backup, 0, count), remaining...)
+
+	return nil
+}
+
+func (c *Collection) sort() {
+	sort.Slice(c.backups, func(i, j int) bool {
+		return c.backups[i].Time.Before(c.backups[j].Time)
+	})
+}
diff --git a/engine/pkg/util/backup/utils.go b/engine/pkg/util/backup/utils.go
new file mode 100644
index 0000000000000000000000000000000000000000..21f4cde2fe0f0b4908c0c9e158db0f0c4b5aa0e7
--- /dev/null
+++ b/engine/pkg/util/backup/utils.go
@@ -0,0 +1,34 @@
+package backup
+
+import (
+	"fmt"
+	"path/filepath"
+	"strings"
+	"time"
+
+	"github.com/pkg/errors"
+
+	"gitlab.com/postgres-ai/database-lab/v3/pkg/util"
+)
+
+// getFileTimestamp returns the timestamp of a backup file.
+// expected filename format: ...bak +func getFileTimestamp(filename string) (time.Time, error) { + base := filepath.Base(filename) + split := strings.Split(base, ".") + + const expectedSize = 3 + + if len(split) < expectedSize { + return time.Time{}, fmt.Errorf("invalid filename format: %s", filename) + } + + timeStr := split[len(split)-2] + timeStamp, err := time.Parse(util.DataStateAtFormat, timeStr) + + if err != nil { + return time.Time{}, errors.Wrap(err, "failed to parse timestamp") + } + + return timeStamp, nil +} diff --git a/engine/pkg/util/projection/common_test.go b/engine/pkg/util/projection/common_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e9bcdc3d86357e22a58c8e84a7bcc8c21a0b15d5 --- /dev/null +++ b/engine/pkg/util/projection/common_test.go @@ -0,0 +1,207 @@ +package projection + +import ( + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func getJSONNormal() map[string]interface{} { + return map[string]interface{}{ + "nested": map[string]interface{}{ + "string": "string", + "int": int64(1), + "float": 1.1, + "bool": true, + "ptrString": "string", + "ptrInt": int64(1), + "ptrFloat": 1.1, + "ptrBool": true, + }, + } +} + +func getJSONNull() map[string]interface{} { + return map[string]interface{}{ + "nested": map[string]interface{}{ + "string": nil, + "int": nil, + "float": nil, + "bool": nil, + "ptrString": nil, + "ptrInt": nil, + "ptrFloat": nil, + "ptrBool": nil, + }, + } +} + +const yamlNormal = ` +nested: + string: "string" + int: 1 + float: 1.1 + bool: true + ptrString: "string" + ptrInt: 1 + ptrFloat: 1.1 + ptrBool: true +` + +const yamlNull = ` +nested: + string: null + int: null + float: null + bool: null + ptrString: null + ptrInt: null + ptrFloat: null + ptrBool: null +` + +const yamlDiverted = ` +nested: + string: "to be stored" + int: 200 + float: 200.2 + bool: false + ptrString: "to be stored" + ptrInt: 200 + ptrFloat: 200.2 + ptrBool: false +` + +const yamlNullApplied = ` 
+nested: + string: "" + int: 0 + float: 0.0 + bool: false + ptrString: "string" + ptrInt: 1 + ptrFloat: 1.1 + ptrBool: true +` + +type testStruct struct { + StringField string `proj:"nested.string"` + IntField int64 `proj:"nested.int"` + FloatField float64 `proj:"nested.float"` + BoolField bool `proj:"nested.bool"` + + PtrStringField *string `proj:"nested.ptrString"` + PtrIntField *int64 `proj:"nested.ptrInt"` + PtrFloatField *float64 `proj:"nested.ptrFloat"` + PtrBoolField *bool `proj:"nested.ptrBool"` + + MissField string `proj:"nested.miss"` + MissNestedField string `proj:"nested.missMap.nested"` + + PtrMissField *string `proj:"nested.ptrMiss"` + PtrMissNestedField *string `proj:"nested.ptrMissMap.nested"` +} + +func fullTestStruct() *testStruct { + strField := "string" + intField := int64(1) + floatField := 1.1 + boolField := true + missField := "ptrMiss" + missNestedField := "ptrMissNested" + + return &testStruct{ + StringField: "string", + IntField: int64(1), + FloatField: 1.1, + BoolField: true, + MissField: "miss", + MissNestedField: "missNested", + PtrStringField: &strField, + PtrIntField: &intField, + PtrFloatField: &floatField, + PtrBoolField: &boolField, + PtrMissField: &missField, + PtrMissNestedField: &missNestedField, + } +} + +func getYamlNormal(t *testing.T) *yaml.Node { + t.Helper() + node := &yaml.Node{} + err := yaml.Unmarshal([]byte(yamlNormal), node) + require.NoError(t, err) + return node +} + +func getYamlNull(t *testing.T) *yaml.Node { + t.Helper() + node := &yaml.Node{} + err := yaml.Unmarshal([]byte(yamlNull), node) + require.NoError(t, err) + return node +} + +func getYamlDiverted(t *testing.T) *yaml.Node { + t.Helper() + node := &yaml.Node{} + err := yaml.Unmarshal([]byte(yamlDiverted), node) + require.NoError(t, err) + return node +} + +func requireEmpty(t *testing.T, s *testStruct) { + t.Helper() + require.Zero(t, s.StringField) + require.Zero(t, s.IntField) + require.Zero(t, s.FloatField) + require.Zero(t, s.BoolField) + 
require.Zero(t, s.MissField) + require.Zero(t, s.MissNestedField) + + require.Nil(t, s.PtrStringField) + require.Nil(t, s.PtrIntField) + require.Nil(t, s.PtrFloatField) + require.Nil(t, s.PtrBoolField) + require.Nil(t, s.PtrMissField) + require.Nil(t, s.PtrMissNestedField) +} + +func requireMissEmpty(t *testing.T, s *testStruct) { + t.Helper() + require.Zero(t, s.MissField) + require.Zero(t, s.MissNestedField) + + require.Nil(t, s.PtrMissField) + require.Nil(t, s.PtrMissNestedField) +} + +func requireComplete(t *testing.T, s *testStruct) { + t.Helper() + require.Equal(t, "string", s.StringField) + require.Equal(t, int64(1), s.IntField) + require.Equal(t, 1.1, s.FloatField) + require.Equal(t, true, s.BoolField) + + require.Equal(t, "string", *s.PtrStringField) + require.Equal(t, int64(1), *s.PtrIntField) + require.Equal(t, 1.1, *s.PtrFloatField) + require.Equal(t, true, *s.PtrBoolField) +} + +func requireYamlNormal(t *testing.T, node *yaml.Node) { + t.Helper() + normal := &yaml.Node{} + err := yaml.Unmarshal([]byte(yamlNormal), normal) + require.NoError(t, err) + require.EqualValues(t, normal, node) +} + +func requireYamlNullApplied(t *testing.T, node *yaml.Node) { + t.Helper() + null := &yaml.Node{} + err := yaml.Unmarshal([]byte(yamlNullApplied), null) + require.NoError(t, err) + require.EqualValues(t, null, node) +} diff --git a/engine/pkg/util/projection/helpers.go b/engine/pkg/util/projection/helpers.go new file mode 100644 index 0000000000000000000000000000000000000000..022eb583ab3807eda2ae38c984917e57128d0cd9 --- /dev/null +++ b/engine/pkg/util/projection/helpers.go @@ -0,0 +1,38 @@ +// Package projection helps to bind struct fields to json/yaml paths. +package projection + +import ( + "gopkg.in/yaml.v3" +) + +// LoadYaml loads struct fields from yaml document. 
+func LoadYaml(target interface{}, yaml *yaml.Node, options LoadOptions) error { + soft, err := NewSoftYaml(yaml) + if err != nil { + return err + } + + return Load(target, soft, options) +} + +// StoreYaml stores struct fields to yaml document. +func StoreYaml(target interface{}, yaml *yaml.Node, options StoreOptions) error { + soft, err := NewSoftYaml(yaml) + if err != nil { + return err + } + + return Store(target, soft, options) +} + +// LoadJSON loads struct fields from json document. +func LoadJSON(target interface{}, m map[string]interface{}, options LoadOptions) error { + soft := NewSoftJSON(m) + return Load(target, soft, options) +} + +// StoreJSON stores struct fields to json document. +func StoreJSON(target interface{}, m map[string]interface{}, options StoreOptions) error { + soft := NewSoftJSON(m) + return Store(target, soft, options) +} diff --git a/engine/pkg/util/projection/json.go b/engine/pkg/util/projection/json.go new file mode 100644 index 0000000000000000000000000000000000000000..b21e6e2595103e6fa8a0929dbdfa99a50d44b949 --- /dev/null +++ b/engine/pkg/util/projection/json.go @@ -0,0 +1,88 @@ +package projection + +import ( + "fmt" + + "gitlab.com/postgres-ai/database-lab/v3/pkg/util/ptypes" +) + +type softJSON struct { + root map[string]interface{} +} + +// NewSoftJSON creates a new JSON accessor. 
+func NewSoftJSON(root map[string]interface{}) Accessor { + return &softJSON{root: root} +} + +func (s *softJSON) Set(path []string, value interface{}, _ ptypes.Type) error { + parent := s.root + for _, key := range path[:len(path)-1] { + child, hasChild := parent[key] + if !hasChild { + child = make(map[string]interface{}) + parent[key] = child + } + + switch childTyped := child.(type) { + case map[string]interface{}: + parent = childTyped + default: + return fmt.Errorf("unsupported type: %T", childTyped) + } + } + + key := path[len(path)-1] + + child, ok := parent[key] + if !ok { + parent[key] = value + return nil + } + + switch child.(type) { + case map[string]interface{}: + return fmt.Errorf("node is already a mapping node") + case []interface{}: + return fmt.Errorf("node is already a sequence node") + default: + parent[key] = value + } + + return nil +} + +func (s *softJSON) Get(path []string, t ptypes.Type) (interface{}, error) { + parent := s.root + for _, key := range path[:len(path)-1] { + child, hasChild := parent[key] + if !hasChild { + return nil, nil + } + + switch childTyped := child.(type) { + case map[string]interface{}: + parent = childTyped + default: + return nil, fmt.Errorf("unsupported type: %T", childTyped) + } + } + + key := path[len(path)-1] + + child, ok := parent[key] + if !ok { + return nil, nil + } + + if child == nil { + return nil, nil + } + + typed, err := ptypes.Convert(child, t) + if err != nil { + return nil, fmt.Errorf("failed to convert %#v: %w", child, err) + } + + return typed, nil +} diff --git a/engine/pkg/util/projection/load_json_test.go b/engine/pkg/util/projection/load_json_test.go new file mode 100644 index 0000000000000000000000000000000000000000..62b75cfd7071e1ea246c120264fce7aea100e427 --- /dev/null +++ b/engine/pkg/util/projection/load_json_test.go @@ -0,0 +1,28 @@ +package projection + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLoadJson(t *testing.T) { + r := require.New(t) + s := 
&testStruct{} + + err := LoadJSON(s, getJSONNormal(), LoadOptions{}) + r.NoError(err) + + requireComplete(t, s) + requireMissEmpty(t, s) +} + +func TestLoadJsonNull(t *testing.T) { + r := require.New(t) + s := fullTestStruct() + + err := LoadJSON(s, getJSONNull(), LoadOptions{}) + r.NoError(err) + + requireEmpty(t, s) +} diff --git a/engine/pkg/util/projection/load_yaml_test.go b/engine/pkg/util/projection/load_yaml_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6b4ab796759db5f407602935695b2bd334ee74ed --- /dev/null +++ b/engine/pkg/util/projection/load_yaml_test.go @@ -0,0 +1,30 @@ +package projection + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLoadYaml(t *testing.T) { + r := require.New(t) + s := &testStruct{} + node := getYamlNormal(t) + + err := LoadYaml(s, node, LoadOptions{}) + r.NoError(err) + + requireMissEmpty(t, s) + requireComplete(t, s) +} + +func TestLoadYamlNull(t *testing.T) { + r := require.New(t) + s := fullTestStruct() + node := getYamlNull(t) + + err := LoadYaml(s, node, LoadOptions{}) + r.NoError(err) + + requireEmpty(t, s) +} diff --git a/engine/pkg/util/projection/multi_group_test.go b/engine/pkg/util/projection/multi_group_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7abbd7752236370bfc206ea90b0d0f34e8cf5ff4 --- /dev/null +++ b/engine/pkg/util/projection/multi_group_test.go @@ -0,0 +1,52 @@ +package projection + +import ( + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +type structMulti struct { + Yaml string `proj:"yaml.yamlValue" groups:"yaml"` + JSON string `proj:"json.jsonValue" groups:"json"` +} + +const yamlMulti = ` +yaml: + yamlValue: "yamlValue" +` + +func getJSONMulti() map[string]interface{} { + return map[string]interface{}{ + "json": map[string]interface{}{ + "jsonValue": "jsonValue", + }, + } +} + +func getYamlMulti(t *testing.T) *yaml.Node { + t.Helper() + node := &yaml.Node{} + err := 
yaml.Unmarshal([]byte(yamlMulti), node) + require.NoError(t, err) + return node +} + +func TestLoadJsonMulti(t *testing.T) { + r := require.New(t) + + s := &structMulti{} + err := LoadJSON(s, getJSONMulti(), LoadOptions{ + Groups: []string{"json"}, + }) + r.NoError(err) + + err = LoadYaml(s, getYamlMulti(t), LoadOptions{ + Groups: []string{"yaml"}, + }) + r.NoError(err) + + r.Equal("jsonValue", s.JSON) + r.Equal("yamlValue", s.Yaml) +} diff --git a/engine/pkg/util/projection/operations.go b/engine/pkg/util/projection/operations.go new file mode 100644 index 0000000000000000000000000000000000000000..6b4f26362703fc3c639d61411bae6688f9f6de19 --- /dev/null +++ b/engine/pkg/util/projection/operations.go @@ -0,0 +1,101 @@ +package projection + +import ( + "reflect" + + "github.com/pkg/errors" + + "gitlab.com/postgres-ai/database-lab/v3/pkg/util/ptypes" +) + +// Load reads the values of the fields of the target struct from the accessor. +func Load(target interface{}, accessor Accessor, options LoadOptions) error { + return forEachField(target, func(tag *fieldTag, field reflect.Value) error { + if !tag.matchesLoad(options) { + return nil + } + + accessorValue, err := accessor.Get(tag.path, tag.fType) + if err != nil { + return err + } + + if accessorValue == nil { + field.Set(reflect.Zero(field.Type())) + return nil + } + + if tag.isPtr { + setValue := ptypes.NewPtr(accessorValue) + if setValue.IsValid() { + field.Set(setValue) + } + } else { + field.Set(reflect.ValueOf(accessorValue)) + } + return nil + }, + ) +} + +// Store writes the values of the fields of the target struct to the accessor. 
+func Store(target interface{}, accessor Accessor, options StoreOptions) error { + return forEachField(target, func(tag *fieldTag, field reflect.Value) error { + if !tag.matchesStore(options) { + return nil + } + var accessorValue interface{} + if tag.isPtr { + if field.IsNil() { + return nil + } + accessorValue = field.Elem().Interface() + } else { + accessorValue = field.Interface() + } + err := accessor.Set(tag.path, accessorValue, tag.fType) + if err != nil { + return err + } + return nil + }, + ) +} + +func forEachField(target interface{}, fn func(tag *fieldTag, field reflect.Value) error) error { + value := reflect.Indirect( + reflect.ValueOf(target), + ) + + if value.Kind() != reflect.Struct { + return errors.Errorf("target must be a struct") + } + + if !value.CanAddr() { + return errors.Errorf("target must be addressable") + } + + valueType := value.Type() + num := value.NumField() + + for i := 0; i < num; i++ { + field := value.Field(i) + fieldType := valueType.Field(i) + + tag, err := getFieldTag(fieldType) + if err != nil { + return err + } + + if tag == nil { + continue + } + + err = fn(tag, field) + if err != nil { + return err + } + } + + return nil +} diff --git a/engine/pkg/util/projection/store_json_test.go b/engine/pkg/util/projection/store_json_test.go new file mode 100644 index 0000000000000000000000000000000000000000..318d4bffefc7211e708b7923dffd99c1feb3afda --- /dev/null +++ b/engine/pkg/util/projection/store_json_test.go @@ -0,0 +1,67 @@ +package projection + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestStoreJson(t *testing.T) { + r := require.New(t) + s := fullTestStruct() + node := map[string]interface{}{} + + err := StoreJSON(s, node, StoreOptions{}) + r.NoError(err) + + var expected = map[string]interface{}{ + "nested": map[string]interface{}{ + "string": "string", + "int": int64(1), + "float": 1.1, + "bool": true, + "miss": "miss", + "missMap": map[string]interface{}{ + "nested": "missNested", + }, + 
"ptrString": "string", + "ptrInt": int64(1), + "ptrFloat": 1.1, + "ptrBool": true, + "ptrMiss": "ptrMiss", + "ptrMissMap": map[string]interface{}{ + "nested": "ptrMissNested", + }, + }, + } + r.EqualValues(expected, node) +} + +func TestStoreJsonNull(t *testing.T) { + r := require.New(t) + s := &testStruct{} + node := getJSONNormal() + + err := StoreJSON(s, node, StoreOptions{}) + r.NoError(err) + + expected := map[string]interface{}{ + "nested": map[string]interface{}{ + "string": "", + "int": int64(0), + "float": 0.0, + "bool": false, + "miss": "", + "missMap": map[string]interface{}{ + "nested": "", + }, + + "ptrString": "string", + "ptrInt": int64(1), + "ptrFloat": 1.1, + "ptrBool": true, + }, + } + + r.EqualValues(expected, node) +} diff --git a/engine/pkg/util/projection/store_yaml_test.go b/engine/pkg/util/projection/store_yaml_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4687a502c2bc61d6121cb0e3aedfbd9c5e429483 --- /dev/null +++ b/engine/pkg/util/projection/store_yaml_test.go @@ -0,0 +1,30 @@ +package projection + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestStoreYaml(t *testing.T) { + r := require.New(t) + s := fullTestStruct() + node := getYamlDiverted(t) + + err := StoreYaml(s, node, StoreOptions{}) + r.NoError(err) + + requireYamlNormal(t, node) +} + +func TestStoreYamlNull(t *testing.T) { + r := require.New(t) + s := &testStruct{} + node := getYamlNormal(t) + + err := StoreYaml(s, node, StoreOptions{}) + r.NoError(err) + + // no changes should have been made to the node + requireYamlNullApplied(t, node) +} diff --git a/engine/pkg/util/projection/tags.go b/engine/pkg/util/projection/tags.go new file mode 100644 index 0000000000000000000000000000000000000000..4191208e8e9b844aaa97fd89c70c59cb4bcffe56 --- /dev/null +++ b/engine/pkg/util/projection/tags.go @@ -0,0 +1,104 @@ +package projection + +import ( + "reflect" + "strings" + + "github.com/pkg/errors" + + 
"gitlab.com/postgres-ai/database-lab/v3/pkg/util/ptypes" +) + +const projectionTag = "proj" +const projectionGroupTag = "groups" + +type fieldTag struct { + path []string + groups []string + isPtr bool + fType ptypes.Type +} + +// LoadOptions is used to filter fields to load. +type LoadOptions struct { + Groups []string +} + +// StoreOptions is used to filter fields to store. +type StoreOptions struct { + Groups []string +} + +func getFieldTag(value reflect.StructField) (*fieldTag, error) { + tag := value.Tag.Get(projectionTag) + if tag == "" { + return nil, nil + } + + options := strings.Split(tag, ",") + path := strings.Split(options[0], ".") + + var isPtr bool + + var fType ptypes.Type + + var groups []string + + groupTag := value.Tag.Get(projectionGroupTag) + if groupTag == "" { + groups = []string{"default"} + } else { + groups = strings.Split(groupTag, ",") + } + + if value.Type.Kind() == reflect.Ptr { + isPtr = true + fType = ptypes.MapKindToType(value.Type.Elem().Kind()) + } else { + isPtr = false + fType = ptypes.MapKindToType(value.Type.Kind()) + } + + if fType == ptypes.Invalid { + return nil, errors.Errorf("invalid type: %s", value.Type.Kind()) + } + + return &fieldTag{ + path: path, + fType: fType, + isPtr: isPtr, + groups: groups, + }, nil +} + +func (f *fieldTag) matchesStore(options StoreOptions) bool { + if len(options.Groups) == 0 { + return true + } + + for _, group := range f.groups { + for _, option := range options.Groups { + if group == option { + return true + } + } + } + + return false +} + +func (f *fieldTag) matchesLoad(options LoadOptions) bool { + if len(options.Groups) == 0 { + return true + } + + for _, group := range f.groups { + for _, option := range options.Groups { + if group == option { + return true + } + } + } + + return false +} diff --git a/engine/pkg/util/projection/types.go b/engine/pkg/util/projection/types.go new file mode 100644 index 0000000000000000000000000000000000000000..64320a9ba70e073da1848fb2fdf613af27021e06 --- 
/dev/null +++ b/engine/pkg/util/projection/types.go @@ -0,0 +1,9 @@ +package projection + +import "gitlab.com/postgres-ai/database-lab/v3/pkg/util/ptypes" + +// Accessor is an interface for getting and setting values from a json / yaml / anything else +type Accessor interface { + Set(path []string, value interface{}, t ptypes.Type) error + Get(path []string, t ptypes.Type) (interface{}, error) +} diff --git a/engine/pkg/util/projection/yaml.go b/engine/pkg/util/projection/yaml.go new file mode 100644 index 0000000000000000000000000000000000000000..83cd6d80c504e60ccbabab8dd52e63a32f25c151 --- /dev/null +++ b/engine/pkg/util/projection/yaml.go @@ -0,0 +1,201 @@ +package projection + +import ( + "fmt" + + "gopkg.in/yaml.v3" + + "gitlab.com/postgres-ai/database-lab/v3/pkg/util/ptypes" +) + +type yamlSoft struct { + root *yaml.Node + document *yaml.Node +} + +// NewSoftYaml creates a new yaml accessor +func NewSoftYaml( + document *yaml.Node, +) (Accessor, error) { + if document.Kind != yaml.DocumentNode { + return nil, fmt.Errorf("document is not a document node") + } + + if len(document.Content) != 1 { + return nil, fmt.Errorf("document has more than one child") + } + + if document.Content[0].Kind != yaml.MappingNode { + return nil, fmt.Errorf("document has no mapping node") + } + + return &yamlSoft{ + root: document.Content[0], + document: document, + }, nil +} + +func (y *yamlSoft) Set(path []string, value interface{}, t ptypes.Type) error { + node := y.root + for _, key := range path { + if node.Kind != yaml.MappingNode { + return fmt.Errorf("node is not a mapping node") + } + + child, hasChild := findNodeForKey(node, key) + if !hasChild { + return nil + } + + node = child + } + + if value == nil { + return nil + } + + if mv, ok := value.(map[string]interface{}); ok { + if err := node.Encode(mv); err != nil { + return fmt.Errorf("cannot encode map: %w", err) + } + + return nil + } + + conv, err := ptypes.Convert(value, ptypes.String) + if err != nil { + return err 
+ } + + node.Value = conv.(string) + node.Tag = ptypeToNodeTag(t) + + return nil +} + +func (y *yamlSoft) Get(path []string, t ptypes.Type) (interface{}, error) { + node := y.root + for _, key := range path { + if node.Kind != yaml.MappingNode { + return nil, fmt.Errorf("node is not a mapping node") + } + + child, hasChild := findNodeForKey(node, key) + if !hasChild { + return nil, nil + } + + node = child + } + + if node.Tag == "!!null" { + return nil, nil + } + + if node.Tag == "!!map" { + return convertMap(node) + } + + typed, err := ptypes.Convert(node.Value, t) + if err != nil { + return nil, err + } + + return typed, nil +} + +func findNodeForKey(node *yaml.Node, key string) (*yaml.Node, bool) { + for i := 0; i < len(node.Content); i += 2 { + if node.Content[i].Value == key { + return node.Content[i+1], true + } + } + + return nil, false +} + +func convertMap(node *yaml.Node) (map[string]interface{}, error) { + convertedMap := make(map[string]interface{}, 0) + + for i := 0; i < len(node.Content); i += 2 { + switch node.Content[i+1].Tag { + case "!!null": + convertedMap[node.Content[i].Value] = struct{}{} + + case "!!map": + nestedMap, err := convertMap(node.Content[i+1]) + if err != nil { + return nil, err + } + + convertedMap[node.Content[i].Value] = nestedMap + + case "!!seq": + slice, err := convertSlice(node.Content[i+1]) + if err != nil { + return nil, err + } + + convertedMap[node.Content[i].Value] = slice + + default: + typed, err := ptypes.Convert(node.Content[i+1].Value, nodeTagToPType(node.Content[i+1].Tag)) + if err != nil { + return nil, err + } + + convertedMap[node.Content[i].Value] = typed + } + } + + return convertedMap, nil +} + +func convertSlice(node *yaml.Node) ([]interface{}, error) { + stringSlice := []interface{}{} + + for _, nodeContent := range node.Content { + typed, err := ptypes.Convert(nodeContent.Value, ptypes.String) + if err != nil { + return nil, fmt.Errorf("failed to convert a slice element: %w", err) + } + + stringSlice = 
append(stringSlice, typed) + } + + return stringSlice, nil +} + +func ptypeToNodeTag(t ptypes.Type) string { + switch t { + case ptypes.String: + return "!!str" + case ptypes.Int64: + return "!!int" + case ptypes.Float64: + return "!!float" + case ptypes.Bool: + return "!!bool" + case ptypes.Map: + return "!!map" + default: + return "" + } +} + +func nodeTagToPType(nodeTag string) ptypes.Type { + switch nodeTag { + case "!!str": + return ptypes.String + case "!!int": + return ptypes.Int64 + case "!!float": + return ptypes.Float64 + case "!!bool": + return ptypes.Bool + case "!!map": + return ptypes.Map + default: + return ptypes.Invalid + } +} diff --git a/engine/pkg/util/ptypes/mapping.go b/engine/pkg/util/ptypes/mapping.go new file mode 100644 index 0000000000000000000000000000000000000000..2f800bc705e500b6b57781310af704870187b713 --- /dev/null +++ b/engine/pkg/util/ptypes/mapping.go @@ -0,0 +1,160 @@ +// Package ptypes helps with type conversion in projections. +package ptypes + +import ( + "reflect" + "strconv" + + "github.com/pkg/errors" +) + +// Type represents the type of value. +type Type int + +const ( + // Invalid is the type of unsupported values. + Invalid Type = iota + // String is a string type. + String + // Int64 is an int64 type. + Int64 + // Float64 is a float64 type. + Float64 + // Bool is a bool type. + Bool + // Map is a map type. + Map +) + +// Convert converts the value to the given type. 
+func Convert(value interface{}, expected Type) (interface{}, error) { + switch expected { + case String: + return convertString(value) + case Int64: + return convertInt64(value) + case Float64: + return convertFloat64(value) + case Bool: + return convertBool(value) + case Map: + return convertMap(value) + } + + return nil, errors.Errorf("unsupported type for conversion: %T", value) +} + +func convertString(value interface{}) (interface{}, error) { + switch v := value.(type) { + case string: + return v, nil + case int64: + return strconv.FormatInt(v, 10), nil + case float64: + if v == 0 { + return "0.0", nil + } + + return strconv.FormatFloat(v, 'f', -1, 64), nil + case bool: + return strconv.FormatBool(v), nil + } + + return nil, errors.Errorf("unsupported string type: %T", value) +} + +func convertInt64(value interface{}) (interface{}, error) { + switch v := value.(type) { + case string: + i, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return nil, err + } + + return i, nil + case int64: + return v, nil + case float64: + return int64(v), nil + } + + return nil, errors.Errorf("unsupported int64 type: %T", value) +} + +func convertFloat64(value interface{}) (interface{}, error) { + switch v := value.(type) { + case string: + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return nil, err + } + + return f, nil + case int64: + return float64(v), nil + case float64: + return v, nil + } + + return nil, errors.Errorf("unsupported float64 type: %T", value) +} + +func convertBool(value interface{}) (interface{}, error) { + switch v := value.(type) { + case string: + b, err := strconv.ParseBool(v) + if err != nil { + return nil, err + } + + return b, nil + case bool: + return v, nil + } + + return nil, errors.Errorf("unsupported bool type: %T", value) +} + +func convertMap(value interface{}) (interface{}, error) { + if v, ok := value.(map[string]interface{}); ok { + return v, nil + } + + return nil, errors.Errorf("unsupported map type: %T", value) +} + 
+// MapKindToType returns the type of the given kind.
+func MapKindToType(kind reflect.Kind) Type {
+	switch kind {
+	case reflect.String:
+		return String
+	case reflect.Int64:
+		return Int64
+	case reflect.Float64:
+		return Float64
+	case reflect.Bool:
+		return Bool
+	case reflect.Map:
+		return Map
+	}
+
+	return Invalid
+}
+
+// NewPtr returns a pointer to a value of the given type.
+func NewPtr(value interface{}) reflect.Value {
+	switch v := value.(type) {
+	case string:
+		return reflect.ValueOf(&v)
+	case int64:
+		return reflect.ValueOf(&v)
+	case float64:
+		return reflect.ValueOf(&v)
+	case bool:
+		return reflect.ValueOf(&v)
+	case map[string]interface{}:
+		return reflect.ValueOf(&v)
+	}
+
+	return reflect.Value{}
+}
diff --git a/engine/pkg/util/yaml/default.go b/engine/pkg/util/yaml/default.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3b39e63b62a9ded51b219d794e23a272bd250e3
--- /dev/null
+++ b/engine/pkg/util/yaml/default.go
@@ -0,0 +1,12 @@
+package yaml
+
+// DefaultConfigMask returns the default mask covering sensitive config fields
+func DefaultConfigMask() *Mask {
+	sensitive := []string{
+		"server.verificationToken",
+		"platform.accessToken",
+		"retrieval.spec.logicalDump.options.source.connection.password",
+	}
+
+	return NewMask(sensitive)
+}
diff --git a/engine/pkg/util/yaml/mask.go b/engine/pkg/util/yaml/mask.go
new file mode 100644
index 0000000000000000000000000000000000000000..2df40a8988bf7422e75cf7bb109950e07869949c
--- /dev/null
+++ b/engine/pkg/util/yaml/mask.go
@@ -0,0 +1,44 @@
+// Package yaml contains utilities to work with YAML nodes
+package yaml
+
+import (
+	"strings"
+
+	"gopkg.in/yaml.v3"
+)
+
+const maskValue = "****"
+
+// Mask is a YAML masking utility
+type Mask struct {
+	paths [][]string
+}
+
+// NewMask creates a new Mask from dot-separated YAML paths.
+func NewMask(paths []string) *Mask {
+	c := &Mask{}
+
+	for _, path := range paths {
+		pathSplit := strings.Split(path, ".")
+		c.paths = append(c.paths, pathSplit)
+	}
+
+	return c
+}
+
+// Yaml replaces, in place, the scalar values at the configured paths with the mask value
+func (c *Mask) Yaml(node *yaml.Node) {
+	for i := 0; i < len(c.paths); i++ {
+		child, found := FindNodeAtPath(node, c.paths[i])
+		if !found {
+			continue
+		}
+
+		if child.Kind != yaml.ScalarNode {
+			continue
+		}
+
+		child.Value = maskValue
+		child.Tag = "!!str"
+	}
+}
diff --git a/engine/pkg/util/yaml/mask_test.go b/engine/pkg/util/yaml/mask_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f0b0cfd29a6de2838bb9143c70c3c392daf7b2bc
--- /dev/null
+++ b/engine/pkg/util/yaml/mask_test.go
@@ -0,0 +1,33 @@
+package yaml
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+	"gopkg.in/yaml.v3"
+)
+
+const yamlStr = `
+root:
+  sensitive: "fromValue"
+  nonSensitive: 123
+`
+
+func TestMask(t *testing.T) {
+	r := require.New(t)
+	node := &yaml.Node{}
+
+	err := yaml.Unmarshal([]byte(yamlStr), node)
+	r.NoError(err)
+
+	mask := NewMask([]string{"root.sensitive"})
+	mask.Yaml(node)
+
+	sensitive, _ := FindNodeAtPathString(node, "root.sensitive")
+	r.NotNil(sensitive)
+	r.Equal(maskValue, sensitive.Value)
+
+	nonSensitive, _ := FindNodeAtPathString(node, "root.nonSensitive")
+	r.NotNil(nonSensitive)
+	r.Equal("123", nonSensitive.Value)
+}
diff --git a/engine/pkg/util/yaml/path.go b/engine/pkg/util/yaml/path.go
new file mode 100644
index 0000000000000000000000000000000000000000..9e7e836bf0ac054e255e32873c2110a7992cd67b
--- /dev/null
+++ b/engine/pkg/util/yaml/path.go
@@ -0,0 +1,38 @@
+// Package yaml provides utilities to work with YAML nodes
+package yaml
+
+import (
+	"strings"
+
+	"gopkg.in/yaml.v3"
+)
+
+// FindNodeAtPathString finds the node at the given path.
+func FindNodeAtPathString(node *yaml.Node, path string) (*yaml.Node, bool) { + return FindNodeAtPath(node, strings.Split(path, ".")) +} + +// FindNodeAtPath finds the node at the given path. +func FindNodeAtPath(node *yaml.Node, path []string) (*yaml.Node, bool) { + if len(path) == 0 { + return node, true + } + + if node.Kind == yaml.DocumentNode { + if len(node.Content) < 1 { + return nil, false + } + + return FindNodeAtPath(node.Content[0], path) + } + + if node.Kind == yaml.MappingNode { + for i := 0; i < len(node.Content); i += 2 { + if node.Content[i].Value == path[0] { + return FindNodeAtPath(node.Content[i+1], path[1:]) + } + } + } + + return nil, false +} diff --git a/engine/test/1.synthetic.sh b/engine/test/1.synthetic.sh index aa7d274fa9dcc12460db2faf289c46469342259f..40684ae6447e3525329d35591b1bc4f7757aad9c 100644 --- a/engine/test/1.synthetic.sh +++ b/engine/test/1.synthetic.sh @@ -109,7 +109,7 @@ sudo docker run \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume ${DLE_TEST_MOUNT_DIR}/${DLE_TEST_POOL_NAME}/dump:${DLE_TEST_MOUNT_DIR}/${DLE_TEST_POOL_NAME}/dump \ --volume ${DLE_TEST_MOUNT_DIR}:${DLE_TEST_MOUNT_DIR}/:rshared \ - --volume "${configDir}":/home/dblab/configs:ro \ + --volume "${configDir}":/home/dblab/configs \ --volume "${metaDir}":/home/dblab/meta \ --volume /sys/kernel/debug:/sys/kernel/debug:rw \ --volume /lib/modules:/lib/modules:ro \ diff --git a/engine/test/2.logical_generic.sh b/engine/test/2.logical_generic.sh index ac7c1dfad7f5c135d89ab1ef8c90d57bcd291722..c16dd52bed9501afc849a15a70a80dd57f430175 100644 --- a/engine/test/2.logical_generic.sh +++ b/engine/test/2.logical_generic.sh @@ -82,6 +82,7 @@ metaDir="$HOME/.dblab/engine/meta" # Copy the contents of configuration example mkdir -p "${configDir}" +mkdir -p "${metaDir}" curl https://gitlab.com/postgres-ai/database-lab/-/raw/"${CI_COMMIT_BRANCH:-master}"/engine/configs/config.example.logical_generic.yml \ --output "${configDir}/server.yml" @@ -96,23 +97,24 @@ yq eval 
-i ' .provision.portPool.from = env(DLE_PORT_POOL_FROM) | .provision.portPool.to = env(DLE_PORT_POOL_TO) | .retrieval.spec.logicalDump.options.dumpLocation = env(DLE_TEST_MOUNT_DIR) + "/" + env(DLE_TEST_POOL_NAME) + "/dump" | - .retrieval.spec.logicalDump.options.source.connection.dbname = strenv(SOURCE_DBNAME) | - .retrieval.spec.logicalDump.options.source.connection.host = strenv(SOURCE_HOST) | - .retrieval.spec.logicalDump.options.source.connection.port = env(SOURCE_PORT) | - .retrieval.spec.logicalDump.options.source.connection.username = strenv(SOURCE_USERNAME) | - .retrieval.spec.logicalDump.options.source.connection.password = strenv(SOURCE_PASSWORD) | .retrieval.spec.logicalRestore.options.dumpLocation = env(DLE_TEST_MOUNT_DIR) + "/" + env(DLE_TEST_POOL_NAME) + "/dump" | .databaseContainer.dockerImage = "postgresai/extended-postgres:" + strenv(POSTGRES_VERSION) ' "${configDir}/server.yml" +SHARED_PRELOAD_LIBRARIES="pg_stat_statements, auto_explain, pgaudit, logerrors, pg_stat_kcache" + # Edit the following options for PostgreSQL 9.6 if [ "${POSTGRES_VERSION}" = "9.6" ]; then yq eval -i ' - .databaseConfigs.configs.shared_preload_libraries = "pg_stat_statements, auto_explain" | .databaseConfigs.configs.log_directory = "log" ' "${configDir}/server.yml" + + SHARED_PRELOAD_LIBRARIES="pg_stat_statements, auto_explain" fi +pendingFile="${metaDir}/pending.retrieval" +sudo touch $pendingFile + ## Launch Database Lab server sudo docker run \ --name ${DLE_SERVER_NAME} \ @@ -123,7 +125,7 @@ sudo docker run \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume ${DLE_TEST_MOUNT_DIR}/${DLE_TEST_POOL_NAME}/dump:${DLE_TEST_MOUNT_DIR}/${DLE_TEST_POOL_NAME}/dump \ --volume ${DLE_TEST_MOUNT_DIR}:${DLE_TEST_MOUNT_DIR}/:rshared \ - --volume "${configDir}":/home/dblab/configs:ro \ + --volume "${configDir}":/home/dblab/configs \ --volume "${metaDir}":/home/dblab/meta \ --volume /sys/kernel/debug:/sys/kernel/debug:rw \ --volume /lib/modules:/lib/modules:ro \ @@ -135,6 
+137,118 @@ sudo docker run \ # Check the Database Lab Engine logs sudo docker logs ${DLE_SERVER_NAME} -f 2>&1 | awk '{print "[CONTAINER dblab_server]: "$0}' & +check_dle_running(){ + if [[ $(curl --silent --header 'Verification-Token: secret_token' --header 'Content-Type: application/json' http://localhost:${DLE_SERVER_PORT}/status | jq -r .status.code) == "OK" ]] ; then + return 0 + fi + return 1 +} + +### Waiting for the Database Lab Engine running. +for i in {1..30}; do + check_dle_running && break || echo "Database Lab Engine is not running yet" + sleep 1 +done + +check_dle_running || (echo "Database Lab Engine is not running" && exit 1) + +check_dle_pending(){ + if [[ $(curl --silent --header 'Verification-Token: secret_token' --header 'Content-Type: application/json' http://localhost:${DLE_SERVER_PORT}/status | jq -r .retrieving.status) == "pending" ]] ; then + return 0 + fi + return 1 +} + +for i in {1..30}; do + check_dle_pending && break || echo "Retrieval state is not pending yet" + sleep 1 +done + +check_dle_pending || (echo "Database Lab Engine is not pending" && exit 1) + +PATCH_CONFIG_DATA=$(jq -n -c \ + --arg dbname "$SOURCE_DBNAME" \ + --arg host "$SOURCE_HOST" \ + --arg port "$SOURCE_PORT" \ + --arg username "$SOURCE_USERNAME" \ + --arg password "$SOURCE_PASSWORD" \ + --arg spl "$SHARED_PRELOAD_LIBRARIES" \ + --arg dockerImage "postgresai/extended-postgres:${POSTGRES_VERSION}" \ +'{ + "global": { + "debug": true + }, + "databaseConfigs": { + "configs": { + "shared_buffers": "256MB", + "shared_preload_libraries": $spl + } + }, + "databaseContainer": { + "dockerImage": $dockerImage + }, + "retrieval": { + "refresh": { + "timetable": "5 0 * * 1" + }, + "spec": { + "logicalDump": { + "options": { + "source": { + "connection": { + "dbname": $dbname, + "host": $host, + "port": $port, + "username": $username, + "password": $password + } + }, + "parallelJobs": 2, + "databases": { + "postgres": {}, + "test": {} + }, + }, + }, + "logicalRestore": { + 
"options": { + "parallelJobs": 2 + } + } + } + } +}') + +echo $PATCH_CONFIG_DATA + +response_code=$(curl --silent -XPOST --data "$PATCH_CONFIG_DATA" --write-out "%{http_code}" \ + --header 'Verification-Token: secret_token' \ + --header 'Content-Type: application/json' \ + --output /tmp/response.json \ + http://localhost:${DLE_SERVER_PORT}/admin/config) + +if [[ $response_code -ne 200 ]]; then + echo "Status code: ${response_code}" + exit 1 +fi + +if [[ -f $pendingFile ]]; then + echo "Pending file has not been removed" + exit 1 +fi + +if [[ $(yq eval '.retrieval.spec.logicalDump.options.source.connection.dbname' ${configDir}/server.yml) != "$SOURCE_DBNAME" || + $(yq eval '.retrieval.spec.logicalDump.options.source.connection.host' ${configDir}/server.yml) != "$SOURCE_HOST" || + $(yq eval '.retrieval.spec.logicalDump.options.source.connection.port' ${configDir}/server.yml) != "$SOURCE_PORT" || + $(yq eval '.retrieval.spec.logicalDump.options.source.connection.username' ${configDir}/server.yml) != "$SOURCE_USERNAME" || + $(yq eval '.retrieval.spec.logicalDump.options.source.connection.password' ${configDir}/server.yml) != "$SOURCE_PASSWORD" || + $(yq eval '.retrieval.refresh.timetable' ${configDir}/server.yml) != "5 0 * * 1" || + $(yq eval '.databaseContainer.dockerImage' ${configDir}/server.yml) != "postgresai/extended-postgres:${POSTGRES_VERSION}" || + $(yq eval '.databaseConfigs.configs.shared_buffers' ${configDir}/server.yml) != "256MB" ]] ; then + echo "Configuration has not been updated properly" + exit 1 +fi + check_dle_readiness(){ if [[ $(curl --silent --header 'Verification-Token: secret_token' --header 'Content-Type: application/json' http://localhost:${DLE_SERVER_PORT}/status | jq -r .retrieving.status) == "finished" ]] ; then return 0 diff --git a/engine/test/3.physical_walg.sh b/engine/test/3.physical_walg.sh index 5e04fd0af9ecc36355942625384b510462a8b958..72e75effea3d0bf7e0b7e52e933bd8216e719071 100644 --- a/engine/test/3.physical_walg.sh +++ 
b/engine/test/3.physical_walg.sh @@ -100,7 +100,7 @@ sudo docker run \ --publish ${DLE_SERVER_PORT}:${DLE_SERVER_PORT} \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume ${DLE_TEST_MOUNT_DIR}:${DLE_TEST_MOUNT_DIR}/:rshared \ - --volume "${configDir}":/home/dblab/configs:ro \ + --volume "${configDir}":/home/dblab/configs \ --volume "${metaDir}":/home/dblab/meta \ --volume /sys/kernel/debug:/sys/kernel/debug:rw \ --volume /lib/modules:/lib/modules:ro \ diff --git a/engine/test/4.physical_basebackup.sh b/engine/test/4.physical_basebackup.sh index cc5dbd29407d21cdab6bb55e26474a59a3b71c2d..dd5fc5d1109f4e6cc6f0d2cf7a910272bfe2aa19 100644 --- a/engine/test/4.physical_basebackup.sh +++ b/engine/test/4.physical_basebackup.sh @@ -141,7 +141,7 @@ sudo docker run \ --publish ${DLE_SERVER_PORT}:${DLE_SERVER_PORT} \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume ${DLE_TEST_MOUNT_DIR}:${DLE_TEST_MOUNT_DIR}/:rshared \ - --volume "${configDir}":/home/dblab/configs:ro \ + --volume "${configDir}":/home/dblab/configs \ --volume "${metaDir}":/home/dblab/meta \ --volume /sys/kernel/debug:/sys/kernel/debug:rw \ --volume /lib/modules:/lib/modules:ro \ diff --git a/engine/test/5.logical_rds.sh b/engine/test/5.logical_rds.sh index eb3f7b8ed5b62deb7424e05262cf8f13327c43e8..ca26a4eef15b09e4936c33f63f6f412f5b7ef27e 100644 --- a/engine/test/5.logical_rds.sh +++ b/engine/test/5.logical_rds.sh @@ -76,7 +76,7 @@ sudo docker run \ --label dblab_test \ --privileged \ --publish ${DLE_SERVER_PORT}:${DLE_SERVER_PORT} \ - --volume "${configDir}":/home/dblab/configs:ro \ + --volume "${configDir}":/home/dblab/configs \ --volume "${metaDir}":/home/dblab/meta \ --volume ${DLE_TEST_MOUNT_DIR}/${DLE_TEST_POOL_NAME}/dump:${DLE_TEST_MOUNT_DIR}/${DLE_TEST_POOL_NAME}/dump \ --volume ${DLE_TEST_MOUNT_DIR}:${DLE_TEST_MOUNT_DIR}/:rshared \ diff --git a/ui/package-lock.json b/ui/package-lock.json index 6b394e8f7537ad2af6646eaca99fed5301a52cef..036d8c0c581d23f8f19065d592fb9fab0f8e669d 
100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -3003,6 +3003,31 @@ "react-dom": "^16.8.0 || ^17.0.0" } }, + "node_modules/@monaco-editor/loader": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.3.2.tgz", + "integrity": "sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g==", + "dependencies": { + "state-local": "^1.0.6" + }, + "peerDependencies": { + "monaco-editor": ">= 0.21.0 < 1" + } + }, + "node_modules/@monaco-editor/react": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.4.5.tgz", + "integrity": "sha512-IImtzU7sRc66OOaQVCG+5PFHkSWnnhrUWGBuH6zNmH2h0YgmAhcjHZQc/6MY9JWEbUtVF1WPBMJ9u1XuFbRrVA==", + "dependencies": { + "@monaco-editor/loader": "^1.3.2", + "prop-types": "^15.7.2" + }, + "peerDependencies": { + "monaco-editor": ">= 0.25.0 < 1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -15960,6 +15985,12 @@ "node": "*" } }, + "node_modules/monaco-editor": { + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.33.0.tgz", + "integrity": "sha512-VcRWPSLIUEgQJQIE0pVT8FcGBIgFoxz7jtqctE+IiCxWugD0DwgyQBcZBhdSrdMC84eumoqMZsGl2GTreOzwqw==", + "peer": true + }, "node_modules/move-concurrently": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/move-concurrently/-/move-concurrently-1.0.1.tgz", @@ -18807,13 +18838,13 @@ } }, "node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + 
"integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", - "react-is": "^16.8.1" + "react-is": "^16.13.1" } }, "node_modules/prop-types-extra": { @@ -22174,6 +22205,11 @@ "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.2.0.tgz", "integrity": "sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==" }, + "node_modules/state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==" + }, "node_modules/static-extend": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", @@ -26679,6 +26715,7 @@ "@craco/craco": "^6.4.3", "@material-ui/core": "^4.12.3", "@material-ui/lab": "^4.0.0-alpha.61", + "@monaco-editor/react": "^4.4.5", "@types/node": "^12.20.33", "@types/react": "^17.0.30", "@types/react-dom": "^17.0.10", @@ -39553,6 +39590,23 @@ "react-is": "^16.8.0 || ^17.0.0" } }, + "@monaco-editor/loader": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.3.2.tgz", + "integrity": "sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g==", + "requires": { + "state-local": "^1.0.6" + } + }, + "@monaco-editor/react": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.4.5.tgz", + "integrity": "sha512-IImtzU7sRc66OOaQVCG+5PFHkSWnnhrUWGBuH6zNmH2h0YgmAhcjHZQc/6MY9JWEbUtVF1WPBMJ9u1XuFbRrVA==", + "requires": { + "@monaco-editor/loader": "^1.3.2", + "prop-types": "^15.7.2" + } + }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -39632,6 +39686,7 @@ "@craco/craco": "^6.4.3", 
"@material-ui/core": "^4.12.3", "@material-ui/lab": "^4.0.0-alpha.61", + "@monaco-editor/react": "^4.4.5", "@types/byte-size": "^8.1.0", "@types/node": "^12.20.33", "@types/react": "^17.0.30", @@ -56953,6 +57008,12 @@ "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==" }, + "monaco-editor": { + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.33.0.tgz", + "integrity": "sha512-VcRWPSLIUEgQJQIE0pVT8FcGBIgFoxz7jtqctE+IiCxWugD0DwgyQBcZBhdSrdMC84eumoqMZsGl2GTreOzwqw==", + "peer": true + }, "move-concurrently": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/move-concurrently/-/move-concurrently-1.0.1.tgz", @@ -59267,13 +59328,13 @@ } }, "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "requires": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", - "react-is": "^16.8.1" + "react-is": "^16.13.1" }, "dependencies": { "react-is": { @@ -61954,6 +62015,11 @@ "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.2.0.tgz", "integrity": "sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==" }, + "state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==" + }, "static-extend": { "version": "0.1.2", "resolved": 
"https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", diff --git a/ui/packages/ce/package.json b/ui/packages/ce/package.json index af10b948317a40b4308898bae283921078343642..2dc2958fbe4cb8afb7a83dde164f17939a4ef89b 100644 --- a/ui/packages/ce/package.json +++ b/ui/packages/ce/package.json @@ -6,6 +6,7 @@ "@craco/craco": "^6.4.3", "@material-ui/core": "^4.12.3", "@material-ui/lab": "^4.0.0-alpha.61", + "@monaco-editor/react": "^4.4.5", "@types/node": "^12.20.33", "@types/react": "^17.0.30", "@types/react-dom": "^17.0.10", diff --git a/ui/packages/ce/src/App/Instance/Page/index.tsx b/ui/packages/ce/src/App/Instance/Page/index.tsx index 4e732d39581d5c8db81de332336e69ff72d23007..97bd9293f7d7abe40102fa4b7cca1cbba6e21ab6 100644 --- a/ui/packages/ce/src/App/Instance/Page/index.tsx +++ b/ui/packages/ce/src/App/Instance/Page/index.tsx @@ -9,6 +9,10 @@ import { destroyClone } from 'api/clones/destroyClone' import { resetClone } from 'api/clones/resetClone' import { getWSToken } from "api/engine/getWSToken"; import { initWS } from "api/engine/initWS"; +import { getConfig } from 'api/configs/getConfig' +import { getFullConfig } from 'api/configs/getFullConfig' +import { updateConfig } from 'api/configs/updateConfig' +import { testDbSource } from 'api/configs/testDbSource' export const Page = () => { const routes = { @@ -23,6 +27,10 @@ export const Page = () => { destroyClone, resetClone, getWSToken, + getConfig, + getFullConfig, + updateConfig, + testDbSource, initWS, } diff --git a/ui/packages/ce/src/api/configs/getConfig.ts b/ui/packages/ce/src/api/configs/getConfig.ts new file mode 100644 index 0000000000000000000000000000000000000000..22ed8fb47ec28ca3e5a7351b798c788730ab19ad --- /dev/null +++ b/ui/packages/ce/src/api/configs/getConfig.ts @@ -0,0 +1,11 @@ +import { formatConfig } from '@postgres.ai/shared/types/api/entities/config' +import { request } from 'helpers/request' + +export const getConfig = async () => { + const response = await 
request('/admin/config') + + return { + response: response.ok ? formatConfig(await response.json()) : null, + error: response.ok ? null : response, + } +} diff --git a/ui/packages/ce/src/api/configs/getFullConfig.ts b/ui/packages/ce/src/api/configs/getFullConfig.ts new file mode 100644 index 0000000000000000000000000000000000000000..abf0338ddbc81abbd58ef71ecb37dcb270855663 --- /dev/null +++ b/ui/packages/ce/src/api/configs/getFullConfig.ts @@ -0,0 +1,14 @@ +import { request } from 'helpers/request' +export const getFullConfig = async () => { + const response = await request('/admin/config.yaml') + .then((res) => res.blob()) + .then((blob) => blob.text()) + .then((yamlAsString) => { + return yamlAsString + }) + + return { + response: response ? response : null, + error: response && null, + } +} diff --git a/ui/packages/ce/src/api/configs/testDbSource.ts b/ui/packages/ce/src/api/configs/testDbSource.ts new file mode 100644 index 0000000000000000000000000000000000000000..608220384da89cc4fe5d6bac13a580fa00d288ee --- /dev/null +++ b/ui/packages/ce/src/api/configs/testDbSource.ts @@ -0,0 +1,20 @@ +import { dbSource } from '@postgres.ai/shared/types/api/entities/dbSource' +import { request } from 'helpers/request' + +export const testDbSource = async (req: dbSource) => { + const response = await request('/admin/test-db-source', { + method: 'POST', + body: JSON.stringify({ + host: req.host, + port: req.port.toString(), + dbname: req.dbname, + username: req.username, + password: req.password, + }), + }) + + return { + response: response.ok ? await response.json(): null, + error: response.ok ? 
null : response, + } +} diff --git a/ui/packages/ce/src/api/configs/updateConfig.ts b/ui/packages/ce/src/api/configs/updateConfig.ts new file mode 100644 index 0000000000000000000000000000000000000000..b9a7e4f54372ee5093fae893637a97a043d272b4 --- /dev/null +++ b/ui/packages/ce/src/api/configs/updateConfig.ts @@ -0,0 +1,50 @@ +import { postUniqueDatabases } from '@postgres.ai/shared/pages/Configuration/utils' +import { Config } from '@postgres.ai/shared/types/api/entities/config' +import { request } from 'helpers/request' + +export const updateConfig = async (req: Config) => { + const response = await request('/admin/config', { + method: 'POST', + body: JSON.stringify({ + global: { + debug: req.debug, + }, + databaseContainer: { + dockerImage: req.dockerImage, + }, + databaseConfigs: { + configs: { + shared_buffers: req.sharedBuffers, + shared_preload_libraries: req.sharedPreloadLibraries, + }, + }, + retrieval: { + refresh: { + timetable: req.timetable, + }, + spec: { + logicalDump: { + options: { + databases: postUniqueDatabases(req.databases), + parallelJobs: req.pg_dump, + source: { + connection: { + dbname: req.dbname, + host: req.host, + port: req.port, + username: req.username, + password: req.password, + }, + }, + }, + }, + }, + }, + }), + }) + + return { + response: response.ok ? response : null, + error: response.ok ? null : response, + } +} diff --git a/ui/packages/shared/pages/Configuration/Header/index.tsx b/ui/packages/shared/pages/Configuration/Header/index.tsx new file mode 100644 index 0000000000000000000000000000000000000000..91bcbf3f9d8f2d43c657faedc7fa1ddef13d2935 --- /dev/null +++ b/ui/packages/shared/pages/Configuration/Header/index.tsx @@ -0,0 +1,38 @@ +/*-------------------------------------------------------------------------- + * Copyright (c) 2019-2021, Postgres.ai, Nikolay Samokhvalov nik@postgres.ai + * All Rights Reserved. Proprietary and confidential. 
+ * Unauthorized copying of this file, via any medium is strictly prohibited + *-------------------------------------------------------------------------- + */ + +import { Link, Typography, Box } from '@material-ui/core' +import { SectionTitle } from '@postgres.ai/shared/components/SectionTitle' +import styles from '../styles.module.scss' + +type Props = { + retrievalMode: string + setOpen: () => void +} + +export const Header = (props: Props) => { + return ( +
+ + + Only select parameters can be changed here. + + + However, you can still see the{' '} + + full configuration file{' '} + {' '} + (with sensitive values masked). + + + Data retrieval mode: {props.retrievalMode} + + + +
+ ) +} diff --git a/ui/packages/shared/pages/Configuration/ResponseMessage/index.tsx b/ui/packages/shared/pages/Configuration/ResponseMessage/index.tsx new file mode 100644 index 0000000000000000000000000000000000000000..df5c2f9c6f94e221dd918d3816719a5741dde44a --- /dev/null +++ b/ui/packages/shared/pages/Configuration/ResponseMessage/index.tsx @@ -0,0 +1,65 @@ +import { makeStyles } from '@material-ui/core/styles' +import BlockIcon from '@material-ui/icons/Block' +import CheckCircleOutlineIcon from '@material-ui/icons/CheckCircleOutline' +import WarningIcon from '@material-ui/icons/Warning' + +const useStyles = makeStyles({ + successIcon: { + marginRight: 8, + color: 'green', + }, + success: { + color: 'green', + alignItems: 'center', + display: 'flex', + }, + errorIcon: { + marginRight: 8, + color: 'red', + }, + error: { + color: 'red', + alignItems: 'center', + display: 'flex', + }, + warning: { + color: '#FD8411', + alignItems: 'center', + display: 'flex', + }, + warningIcon: { + marginRight: 8, + color: '#FD8411', + }, +}) + +export const ResponseMessage = ({ + type, + message, +}: { + type: string + message: string | React.ReactNode | null +}) => { + const classes = useStyles() + + return ( + + {type === 'success' || type === 'ok' ? ( + + ) : type === 'warning' || type === 'notice' ? ( + + ) : ( + + )} + {message} + + ) +} diff --git a/ui/packages/shared/pages/Configuration/index.tsx b/ui/packages/shared/pages/Configuration/index.tsx new file mode 100644 index 0000000000000000000000000000000000000000..e94850c1f7d7aad8842e6c4b615d533ea810cdcc --- /dev/null +++ b/ui/packages/shared/pages/Configuration/index.tsx @@ -0,0 +1,488 @@ +/*-------------------------------------------------------------------------- + * Copyright (c) 2019-2021, Postgres.ai, Nikolay Samokhvalov nik@postgres.ai + * All Rights Reserved. Proprietary and confidential. 
+ * Unauthorized copying of this file, via any small is strictly prohibited + *-------------------------------------------------------------------------- + */ + +import { + Box, + Checkbox, + FormControlLabel, + Typography, + TextField, + Chip, +} from '@material-ui/core' +import { useState, useEffect } from 'react' +import { withStyles, makeStyles } from '@material-ui/core/styles' +import { Modal } from '@postgres.ai/shared/components/Modal' +import { SectionTitle } from '@postgres.ai/shared/components/SectionTitle' +import { StubSpinner } from '@postgres.ai/shared/components/StubSpinner' +import { Button } from '@postgres.ai/shared/components/Button' +import { Header } from './Header' +import { observer } from 'mobx-react-lite' +import Editor from '@monaco-editor/react' +import { useStores } from '@postgres.ai/shared/pages/Instance/context' +import { FormValues, useForm } from './useForm' +import { Spinner } from '@postgres.ai/shared/components/Spinner' +import styles from './styles.module.scss' +import { SimpleModalControls } from '@postgres.ai/shared/components/SimpleModalControls' +import { ResponseMessage } from './ResponseMessage' +import { uniqueDatabases } from './utils' + +export const GrayTextTypography = withStyles({ + root: { + color: '#8a8a8a', + fontSize: '12px', + }, +})(Typography) + +const useStyles = makeStyles({ + checkboxRoot: { + padding: '9px 10px', + }, +}) + +export const Configuration = observer( + ({ + switchActiveTab, + activeTab, + }: { + switchActiveTab: (activeTab: number) => void + activeTab: number + }) => { + const classes = useStyles() + const stores = useStores() + const { + config, + updateConfig, + getFullConfig, + fullConfig, + testDbSource, + updateConfigError, + } = stores.main + const configData = config && JSON.parse(JSON.stringify(config)) + const [submitMessage, setSubmitMessage] = useState< + string | React.ReactNode | null + >('') + const [connectionResponse, setConnectionResponse] = useState( + null, + ) + const 
[submitStatus, setSubmitStatus] = useState('') + const [connectionStatus, setConnectionStatus] = useState('') + const [isTestConnectionLoading, setIsTestConnectionLoading] = + useState(false) + const [isOpen, setIsOpen] = useState(false) + + const onSubmit = async (values: FormValues) => { + setSubmitMessage(null) + await updateConfig(values).then((response) => { + if (response?.ok) { + setSubmitStatus('success') + setSubmitMessage( +

+ Changes applied.{' '} + switchActiveTab(0)} + > + Switch to Overview + {' '} + to see details and to work with clones +

, + ) + } + }) + } + const [{ formik, connectionData }] = useForm(onSubmit) + + const onTestConnectionClick = async () => { + setConnectionResponse(null) + setIsTestConnectionLoading(true) + Object.keys(connectionData).map(function (key) { + if (key !== 'password') { + formik.validateField(key) + } + }) + + formik.isValid && + testDbSource(connectionData).then((response) => { + if (response) { + setTimeout(() => { + setConnectionStatus(response.status) + setConnectionResponse(response.message) + setIsTestConnectionLoading(false) + }, 1500) + } + }) + } + + const handleModalClick = async () => { + await getFullConfig() + setIsOpen(true) + } + + const handleDeleteDatabase = ( + _: React.FormEvent, + database: string, + ) => { + if (formik.values.databases) { + let currentDatabases = uniqueDatabases(formik.values.databases) + let curDividers = formik.values.databases.match( + /[,(\s)(\n)(\r)(\t)(\r\n)]/gm, + ) + let splitDatabases = currentDatabases.split(',') + let newDatabases = '' + + for (let i in splitDatabases) { + if (curDividers && splitDatabases[i] !== database) { + newDatabases = + newDatabases + + splitDatabases[i] + + (curDividers[i] ? curDividers[i] : '') + } + } + + formik.setFieldValue('databases', newDatabases) + } + } + + // Set initial data, empty string for password + useEffect(() => { + if (configData) { + for (const [key, value] of Object.entries(configData)) { + if (key !== 'password') { + formik.setFieldValue(key, value) + } + } + } + }, [config]) + + // Clear response message on tab change + useEffect(() => { + setConnectionResponse(null) + setSubmitMessage(null) + }, [activeTab]) + + return ( +
+ +
+ + + + formik.setFieldValue('debug', e.target.checked) + } + classes={{ + root: classes.checkboxRoot, + }} + /> + } + label={Debug mode} + color="#8a8a8a" + /> + + + + + Container settings that will be used by default for each + Postgres container the DLE manages + + + + formik.setFieldValue('dockerImage', e.target.value) + } + /> + + + + + + Default PostgreSQL configuration used by all Postgres instances + managed by DLE. Each section have additional settings to + override these defaults. + + + + formik.setFieldValue('sharedBuffers', e.target.value) + } + /> + + + + formik.setFieldValue( + 'sharedPreloadLibraries', + e.target.value, + ) + } + /> + + + + + + Subsection retrieval.refresh + + + Define full data refresh on schedule. The process requires at + least one additional filesystem mount point. The schedule is to + be specified using{' '} + + crontab format. + + + + + formik.setFieldValue('timetable', e.target.value) + } + /> + + Subsection retrieval.spec.logicalDump + + Source database credentials and dumping options. + + + formik.setFieldValue('host', e.target.value)} + /> + + + formik.setFieldValue('port', e.target.value)} + /> + + + + formik.setFieldValue('username', e.target.value) + } + /> + + + + formik.setFieldValue('password', e.target.value) + } + /> + + + + formik.setFieldValue('dbname', e.target.value) + } + /> + + +
+ + formik.setFieldValue('databases', e.target.value) + } + value={formik.values.databases} + multiline + label="Databases" + inputProps={{ + name: 'databases', + id: 'databases', + }} + InputLabelProps={{ + shrink: true, + }} + /> +
+ +
+ {formik.values.databases && + uniqueDatabases(formik.values.databases) + .split(',') + .map((database, index) => { + if (database !== '') { + return ( + + handleDeleteDatabase(event, database) + } + color="primary" + /> + ) + } + })} +
+
+ + + formik.setFieldValue('pg_dump', e.target.value) + } + /> + + + + formik.setFieldValue('pg_restore', e.target.value) + } + /> + +
+ + + {connectionStatus && connectionResponse ? ( + + ) : null} + +
+ + + + {(submitStatus && submitMessage) || updateConfigError ? ( + + ) : null} + + + setIsOpen(false)} + isOpen={isOpen} + size="xl" + > + } + theme="vs-light" + options={{ domReadOnly: true, readOnly: true }} + /> + setIsOpen(false), + }, + ]} + /> + +
+ ) + }, +) diff --git a/ui/packages/shared/pages/Configuration/styles.module.scss b/ui/packages/shared/pages/Configuration/styles.module.scss new file mode 100644 index 0000000000000000000000000000000000000000..69823cd48ef59c62003238e9a06be66e098f84cd --- /dev/null +++ b/ui/packages/shared/pages/Configuration/styles.module.scss @@ -0,0 +1,40 @@ +.textField { + width: 300px; + max-width: 100%; +} + +.databasesContainer { + width: 300px; + max-width: 100%; +} + +.chip { + margin-right: 8px; + margin-top: 8px; +} + +.buttonContainer { + display: flex; + align-items: center; + justify-content: flex-start; + gap: 10px; + + @media screen and (max-width: '560px') { + flex-direction: column; + justify-content: flex-start; + align-items: flex-start !important; + } +} + +.underline { + text-decoration: underline; + cursor: pointer; +} + +.root { + font-size: 12px; +} + +.spinner { + margin-left: 8px; +} diff --git a/ui/packages/shared/pages/Configuration/useForm.ts b/ui/packages/shared/pages/Configuration/useForm.ts new file mode 100644 index 0000000000000000000000000000000000000000..c3fe2722370ecd433ef8f6383e81e43aa568e2d8 --- /dev/null +++ b/ui/packages/shared/pages/Configuration/useForm.ts @@ -0,0 +1,67 @@ +/*-------------------------------------------------------------------------- + * Copyright (c) 2019-2021, Postgres.ai, Nikolay Samokhvalov nik@postgres.ai + * All Rights Reserved. Proprietary and confidential. 
+ * Unauthorized copying of this file, via any medium is strictly prohibited + *-------------------------------------------------------------------------- + */ + +import { useFormik } from 'formik' +import * as Yup from 'yup' + +export type FormValues = { + debug: boolean + dockerImage: string + sharedBuffers: string + sharedPreloadLibraries: string + timetable: string + dbname: string + host: string + port: string + username: string + password: string + databases: string + pg_dump: string + pg_restore: string +} + +const Schema = Yup.object().shape({ + dockerImage: Yup.string().required('Docker image is required'), + dbname: Yup.string().required('Dbname is required'), + host: Yup.string().required('Host is required'), + port: Yup.string().required('Port is required'), + username: Yup.string().required('Username is required'), +}) + +export const useForm = (onSubmit: (values: FormValues) => void) => { + const formik = useFormik({ + initialValues: { + debug: false, + dockerImage: '', + sharedBuffers: '', + sharedPreloadLibraries: '', + timetable: '', + dbname: '', + host: '', + port: '', + username: '', + password: '', + databases: '', + pg_dump: '', + pg_restore: '' + }, + validationSchema: Schema, + onSubmit, + validateOnBlur: false, + validateOnChange: false, + }) + + const connectionData = { + host: formik.values.host, + port: formik.values.port, + username: formik.values.username, + password: formik.values.password, + dbname: formik.values.dbname, + } + + return [{ formik, connectionData }] +} diff --git a/ui/packages/shared/pages/Configuration/utils/index.ts b/ui/packages/shared/pages/Configuration/utils/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..f2a9212417473250e50b5518d7b8d20f0b83c320 --- /dev/null +++ b/ui/packages/shared/pages/Configuration/utils/index.ts @@ -0,0 +1,28 @@ +import { DatabaseType } from "types/api/entities/config" + +export const uniqueDatabases = (values: string) => { + let splitValuesArray = 
values.split(/[,(\s)(\n)(\r)(\t)(\r\n)]/) + let newValuesArray = [] + + for (let i in splitValuesArray) { + if ( + splitValuesArray[i] !== '' && + newValuesArray.indexOf(splitValuesArray[i]) === -1 + ) { + newValuesArray.push(splitValuesArray[i]) + } + } + + return newValuesArray.join(',') +} + +export const postUniqueDatabases = (values: any) => { + let splitValuesArray = values.split(/[,(\s)(\n)(\r)(\t)(\r\n)]/) + + const obj = splitValuesArray.reduce((acc: DatabaseType, curr: number) => { + acc[curr] = {} + return acc + }, {}) + + return values.length !== 0 ? obj : null +} diff --git a/ui/packages/shared/pages/Instance/Tabs/index.tsx b/ui/packages/shared/pages/Instance/Tabs/index.tsx index 3506ce26b0fa799ece8cd541a24d19b223287f30..973f9762fb21a0b43dcaef46b011e202cbfbaa66 100644 --- a/ui/packages/shared/pages/Instance/Tabs/index.tsx +++ b/ui/packages/shared/pages/Instance/Tabs/index.tsx @@ -5,9 +5,13 @@ *-------------------------------------------------------------------------- */ -import React from 'react'; -import {makeStyles, Tab as TabComponent, Tabs as TabsComponent,} from '@material-ui/core' -import {colors} from '@postgres.ai/shared/styles/colors' +import React from 'react' +import { + makeStyles, + Tab as TabComponent, + Tabs as TabsComponent, +} from '@material-ui/core' +import { colors } from '@postgres.ai/shared/styles/colors' const useStyles = makeStyles({ tabsRoot: { @@ -60,12 +64,19 @@ export const Tabs = (props: Props) => { value={0} /> + {/* // TODO(Anton): Probably will be later. */} {/* { const { instance, instanceError } = stores.main useEffect(() => { + if (instance && instance?.state.retrieving?.status === "pending") { + setActiveTab(2) + } if (instance && !instance?.state?.pools) { if (!props.callbacks) return @@ -126,7 +130,7 @@ export const Instance = observer((props: Props) => { {!instanceError && (
-          {!instance && }
+          {(!instance || !instance?.state.retrieving?.status) && }
           {instance && (
             <>
@@ -140,7 +144,6 @@ export const Instance = observer((props: Props) => {
-              Instance
@@ -152,6 +155,10 @@ export const Instance = observer((props: Props) => {
+
+              setActiveTab(id)} activeTab={activeTab} />
+
+
           )
diff --git a/ui/packages/shared/pages/Instance/stores/Main.ts b/ui/packages/shared/pages/Instance/stores/Main.ts
index 335a792b0cdf7fc065bab975c79605441f8a5fd0..ed857956a13e8e4d4d3fe7badbdcdae0653a9d18 100644
--- a/ui/packages/shared/pages/Instance/stores/Main.ts
+++ b/ui/packages/shared/pages/Instance/stores/Main.ts
@@ -8,14 +8,20 @@ import { makeAutoObservable } from 'mobx'
 import { GetSnapshots } from '@postgres.ai/shared/types/api/endpoints/getSnapshots'
 import { GetInstance } from '@postgres.ai/shared/types/api/endpoints/getInstance'
+import { Config } from '@postgres.ai/shared/types/api/entities/config'
+import { GetConfig } from '@postgres.ai/shared/types/api/endpoints/getConfig'
+import { UpdateConfig } from '@postgres.ai/shared/types/api/endpoints/updateConfig'
+import { TestDbSource } from '@postgres.ai/shared/types/api/endpoints/testDbSource'
 import { RefreshInstance } from '@postgres.ai/shared/types/api/endpoints/refreshInstance'
 import { DestroyClone } from '@postgres.ai/shared/types/api/endpoints/destroyClone'
 import { ResetClone } from '@postgres.ai/shared/types/api/endpoints/resetClone'
-import { GetWSToken } from "@postgres.ai/shared/types/api/endpoints/getWSToken";
-import { InitWS } from "@postgres.ai/shared/types/api/endpoints/initWS";
+import { GetWSToken } from '@postgres.ai/shared/types/api/endpoints/getWSToken'
+import { InitWS } from '@postgres.ai/shared/types/api/endpoints/initWS'
 import { Instance } from '@postgres.ai/shared/types/api/entities/instance'
 import { SnapshotsStore } from '@postgres.ai/shared/stores/Snapshots'
 import { getTextFromUnknownApiError } from '@postgres.ai/shared/utils/api'
+import { dbSource } from
'types/api/entities/dbSource' +import { GetFullConfig } from 'types/api/endpoints/getFullConfig' const POLLING_TIME = 2000 @@ -29,16 +35,23 @@ export type Api = { resetClone: ResetClone getWSToken: GetWSToken initWS?: InitWS + getConfig?: GetConfig + updateConfig?: UpdateConfig + testDbSource?: TestDbSource + getFullConfig?: GetFullConfig } type Error = { title?: string - message: string + message?: string } export class MainStore { instance: Instance | null = null + config: Config | null = null + fullConfig?: string instanceError: Error | null = null + updateConfigError: string | null = null unstableClones = new Set() private updateInstanceTimeoutId: number | null = null @@ -63,8 +76,8 @@ export class MainStore { load = (instanceId: string) => { this.instance = null - this.loadInstance(instanceId) + this.getConfig() this.snapshots.load(instanceId) } @@ -116,6 +129,54 @@ export class MainStore { return !!response } + getConfig = async () => { + if (!this.api.getConfig) return + + const { response, error } = await this.api.getConfig() + + if (response) { + this.config = response + } + + if (error) await getTextFromUnknownApiError(error) + + return !!response + } + + updateConfig = async (values: Config) => { + if (!this.api.updateConfig) return + + const { response, error } = await this.api.updateConfig({ ...values }) + + if (error) + this.updateConfigError = await error.json().then((err) => err.message) + + return response + } + + getFullConfig = async () => { + if (!this.api.getFullConfig) return + + const { response, error } = await this.api.getFullConfig() + if (response) { + this.fullConfig = response + } + + if (error) await getTextFromUnknownApiError(error) + + return response + } + + testDbSource = async (values: dbSource) => { + if (!this.api.testDbSource) return + + const { response, error } = await this.api.testDbSource(values) + + if (error) await getTextFromUnknownApiError(error) + + return response + } + resetClone = async (cloneId: string, snapshotId: 
string) => { if (!this.instance) return diff --git a/ui/packages/shared/types/api/endpoints/getConfig.ts b/ui/packages/shared/types/api/endpoints/getConfig.ts new file mode 100644 index 0000000000000000000000000000000000000000..ea9f7546df631a97418f5767df6a7a85dd26632c --- /dev/null +++ b/ui/packages/shared/types/api/endpoints/getConfig.ts @@ -0,0 +1,6 @@ +import { Config } from "../entities/config" + +export type GetConfig = () => Promise<{ + response: Config | null + error: Response | null +}> diff --git a/ui/packages/shared/types/api/endpoints/getFullConfig.ts b/ui/packages/shared/types/api/endpoints/getFullConfig.ts new file mode 100644 index 0000000000000000000000000000000000000000..ae25c2f1c9164e86d5be338d4f722c4fd0c2e85a --- /dev/null +++ b/ui/packages/shared/types/api/endpoints/getFullConfig.ts @@ -0,0 +1,4 @@ +export type GetFullConfig = () => Promise<{ + response: string | null + error: Response | null +}> diff --git a/ui/packages/shared/types/api/endpoints/testDbSource.ts b/ui/packages/shared/types/api/endpoints/testDbSource.ts new file mode 100644 index 0000000000000000000000000000000000000000..95552c8bd720f403e9090128151da91005edf291 --- /dev/null +++ b/ui/packages/shared/types/api/endpoints/testDbSource.ts @@ -0,0 +1,6 @@ +import { dbSource, TestSourceDTO } from 'types/api/entities/dbSource' + +export type TestDbSource = (values: dbSource) => Promise<{ + response: TestSourceDTO | null + error: Response | null +}> diff --git a/ui/packages/shared/types/api/endpoints/updateConfig.ts b/ui/packages/shared/types/api/endpoints/updateConfig.ts new file mode 100644 index 0000000000000000000000000000000000000000..e82255d6a7291a166cda5bc728d037b237b8b876 --- /dev/null +++ b/ui/packages/shared/types/api/endpoints/updateConfig.ts @@ -0,0 +1,7 @@ +import { Config } from 'types/api/entities/config' + +export type UpdateConfig = (values: Config) => Promise<{ + response: Response | null + error: Response | null + }> + \ No newline at end of file diff --git 
a/ui/packages/shared/types/api/entities/config.ts b/ui/packages/shared/types/api/entities/config.ts new file mode 100644 index 0000000000000000000000000000000000000000..45b6938b8a9c91cdbef39404c5ec9a266da93421 --- /dev/null +++ b/ui/packages/shared/types/api/entities/config.ts @@ -0,0 +1,88 @@ +export interface DatabaseType { + [name: string]: string | Object +} + +export type configTypes = { + global?: { + debug?: boolean + } + databaseContainer?: { + dockerImage?: string + } + databaseConfigs?: { + configs?: { + shared_buffers?: string + shared_preload_libraries?: string + } + } + retrieval?: { + refresh?: { + timetable?: string + } + spec?: { + logicalDump?: { + options?: { + databases?: DatabaseType | null + parallelJobs?: string | number + source?: { + connection?: { + dbname?: string + host?: string + port?: string | number + username?: string + password?: string + } + } + } + } + logicalRestore?: { + options?: { + parallelJobs?: string | number + } + } + } + } +} + +const formatDatabases = (databases: DatabaseType | null) => { + let formattedDatabases = '' + + if (databases !== null) { + Object.keys(databases).forEach(function (key) { + formattedDatabases += key + ',' + }) + } + + return formattedDatabases +} + +export const formatConfig = (config: configTypes) => { + return { + debug: config.global?.debug, + dockerImage: config.databaseContainer?.dockerImage, + sharedBuffers: config.databaseConfigs?.configs?.shared_buffers, + sharedPreloadLibraries: + config.databaseConfigs?.configs?.shared_preload_libraries, + timetable: config.retrieval?.refresh?.timetable, + dbname: + config.retrieval?.spec?.logicalDump?.options?.source?.connection?.dbname, + host: config.retrieval?.spec?.logicalDump?.options?.source?.connection + ?.host, + port: config.retrieval?.spec?.logicalDump?.options?.source?.connection + ?.port, + username: + config.retrieval?.spec?.logicalDump?.options?.source?.connection + ?.username, + password: + 
config.retrieval?.spec?.logicalDump?.options?.source?.connection
+        ?.password,
+    databases: formatDatabases(
+      config.retrieval?.spec?.logicalDump?.options
+        ?.databases as DatabaseType | null,
+    ),
+    pg_dump: config.retrieval?.spec?.logicalDump?.options?.parallelJobs,
+    pg_restore: config.retrieval?.spec?.logicalRestore?.options?.parallelJobs,
+  }
+}
+
+export type Config = ReturnType<typeof formatConfig>
diff --git a/ui/packages/shared/types/api/entities/dbSource.ts b/ui/packages/shared/types/api/entities/dbSource.ts
new file mode 100644
index 0000000000000000000000000000000000000000..3468cda7a440bfd4cf887bc1273a6451d868f959
--- /dev/null
+++ b/ui/packages/shared/types/api/entities/dbSource.ts
@@ -0,0 +1,12 @@
+export type dbSource = {
+  host: string
+  port: string
+  dbname: string
+  username: string
+  password: string
+}
+
+export type TestSourceDTO = {
+  message: string
+  status: string
+}
\ No newline at end of file
diff --git a/ui/packages/shared/types/api/entities/instanceState.ts b/ui/packages/shared/types/api/entities/instanceState.ts
index f882206c5598fac5488424a23ee1b1a48d737bc1..2f56f76e92f1db1c9335876dc1ab9ba55a9d5dd8 100644
--- a/ui/packages/shared/types/api/entities/instanceState.ts
+++ b/ui/packages/shared/types/api/entities/instanceState.ts
@@ -27,7 +27,7 @@ export type InstanceStateDto = {
   lastRefresh: string | null
   nextRefresh: string | null
   mode: string
-  status: 'finished' | 'failed' | 'refreshing'
+  status: 'finished' | 'failed' | 'refreshing' | 'pending'
   alerts?: {
     refresh_failed?: {
       level: 'error'