From 00ca15e94f080fa4db07ac90d5aa0623dcc1e3c7 Mon Sep 17 00:00:00 2001
From: Jonas Kaninda
Date: Thu, 29 Aug 2024 21:49:35 +0200
Subject: [PATCH 1/5] feat: add database migration from a source to a target
 database

---
 cmd/backup.go           |  2 ++
 cmd/migrate.go          | 21 ++++++++++++
 cmd/restore.go          |  2 ++
 cmd/root.go             |  5 +--
 docker/Dockerfile       |  9 +++--
 docs/how-tos/migrate.md | 75 +++++++++++++++++++++++++++++++++++++++++
 pkg/backup.go           | 72 +++++++++++++++++----------------------
 pkg/config.go           | 55 ++++++++++++++++++++++++++++++
 pkg/helper.go           | 37 ++++++++++++++++++++
 pkg/migrate.go          | 31 +++++++++++++++++
 pkg/restore.go          | 37 ++++++++++----------
 pkg/var.go              | 24 +++++++++----
 utils/utils.go          | 49 ++------------------------
 13 files changed, 298 insertions(+), 121 deletions(-)
 create mode 100644 cmd/migrate.go
 create mode 100644 docs/how-tos/migrate.md
 create mode 100644 pkg/migrate.go

diff --git a/cmd/backup.go b/cmd/backup.go
index 5d48819..fc5f76b 100644
--- a/cmd/backup.go
+++ b/cmd/backup.go
@@ -21,6 +21,8 @@ var BackupCmd = &cobra.Command{
 
 func init() {
 	//Backup
+	BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
+	BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
 	BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled")
 	BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time")
 	BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
diff --git a/cmd/migrate.go b/cmd/migrate.go
new file mode 100644
index 0000000..70af8b6
--- /dev/null
+++ b/cmd/migrate.go
@@ -0,0 +1,21 @@
+package cmd
+
+import (
+	"github.com/jkaninda/pg-bkup/pkg"
+	"github.com/jkaninda/pg-bkup/utils"
+	"github.com/spf13/cobra"
+)
+
+var MigrateCmd = &cobra.Command{
+	Use:   "migrate",
+	Short: "Migrate database from a source database to a target database",
+	Run: func(cmd *cobra.Command, args []string) {
+		if len(args) == 0 {
+			pkg.StartMigration(cmd)
+		} else {
+			utils.Fatal("Error, no arguments required")
+
+		}
+
+	},
+}
diff --git a/cmd/restore.go b/cmd/restore.go
index 4e275ef..b81da31 100644
--- a/cmd/restore.go
+++ b/cmd/restore.go
@@ -24,5 +24,7 @@ var RestoreCmd = &cobra.Command{
 func init() {
 	//Restore
 	RestoreCmd.PersistentFlags().StringP("file", "f", "", "File name of database")
+	RestoreCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
+	RestoreCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
 
 }
diff --git a/cmd/root.go b/cmd/root.go
index 613a335..13de869 100644
--- a/cmd/root.go
+++ b/cmd/root.go
@@ -18,7 +18,6 @@ var rootCmd = &cobra.Command{
 	Example: utils.MainExample,
 	Version: appVersion,
 }
-var operation = ""
 
 // Execute adds all child commands to the root command and sets flags appropriately.
 // This is called by main.main(). It only needs to happen once to the rootCmd.
@@ -30,12 +29,10 @@ func Execute() {
 }
 
 func init() {
-	rootCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
-	rootCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
 	rootCmd.PersistentFlags().StringP("dbname", "d", "", "Database name")
 	rootCmd.PersistentFlags().IntP("port", "p", 5432, "Database port")
-	rootCmd.PersistentFlags().StringVarP(&operation, "operation", "o", "", "Set operation, for old version only")
 	rootCmd.AddCommand(VersionCmd)
 	rootCmd.AddCommand(BackupCmd)
 	rootCmd.AddCommand(RestoreCmd)
+	rootCmd.AddCommand(MigrateCmd)
 }
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 7c81f8c..59f5053 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -14,7 +14,7 @@ ENV DB_HOST=""
 ENV DB_NAME=""
 ENV DB_USERNAME=""
 ENV DB_PASSWORD=""
-ENV DB_PORT="5432"
+ENV DB_PORT=5432
 ENV STORAGE=local
 ENV AWS_S3_ENDPOINT=""
 ENV AWS_S3_BUCKET_NAME=""
@@ -30,8 +30,13 @@ ENV SSH_PASSWORD=""
 ENV SSH_HOST_NAME=""
 ENV SSH_IDENTIFY_FILE=""
 ENV SSH_PORT="22"
+ENV SOURCE_DB_HOST=""
+ENV SOURCE_DB_PORT=5432
+ENV SOURCE_DB_NAME=""
+ENV SOURCE_DB_USERNAME=""
+ENV SOURCE_DB_PASSWORD=""
 ARG DEBIAN_FRONTEND=noninteractive
-ENV VERSION="v1.2.2"
+ENV VERSION="v1.2.3"
 ENV BACKUP_CRON_EXPRESSION=""
 ENV GNUPGHOME="/tmp/gnupg"
 ARG WORKDIR="/app"
diff --git a/docs/how-tos/migrate.md b/docs/how-tos/migrate.md
new file mode 100644
index 0000000..a624c95
--- /dev/null
+++ b/docs/how-tos/migrate.md
@@ -0,0 +1,75 @@
+---
+title: Migrate database
+layout: default
+parent: How Tos
+nav_order: 9
+---
+
+# Migrate database
+
+To migrate a database, use the `migrate` command.
+
+{: .note }
+pg-bkup has another great feature: migrating your database from a source database to another.
+
+As you know, to restore a database from a source to a target database, you need 2 operations: to start by backing up the source database and then restoring the source backed database to the target database.
+Instead of proceeding like that, you can use the integrated feature `(migrate)` that will help you to migrate your database by doing only one operation.
+
+
+### Docker compose
+```yml
+services:
+  pg-bkup:
+    # In production, it is advised to lock your image tag to a proper
+    # release version instead of using `latest`.
+    # Check https://github.com/jkaninda/pg-bkup/releases
+    # for a list of available releases.
+    image: jkaninda/pg-bkup
+    container_name: pg-bkup
+    command: migrate
+    volumes:
+      - ./backup:/backup
+    environment:
+      ## Target database
+      - DB_PORT=5432
+      - DB_HOST=postgres
+      - DB_NAME=database
+      - DB_USERNAME=username
+      - DB_PASSWORD=password
+      ## Source database
+      - SOURCE_DB_HOST=postgres
+      - SOURCE_DB_PORT=5432
+      - SOURCE_DB_NAME=sourcedb
+      - SOURCE_DB_USERNAME=jonas
+      - SOURCE_DB_PASSWORD=password
+    # pg-bkup container must be connected to the same network as your database
+    networks:
+      - web
+networks:
+  web:
+```
+
+### Migrate database using Docker CLI
+
+```env
+## Target database
+DB_PORT=5432
+DB_HOST=postgres
+DB_NAME=targetdb
+DB_USERNAME=targetuser
+DB_PASSWORD=password
+
+## Source database
+SOURCE_DB_HOST=postgres
+SOURCE_DB_PORT=5432
+SOURCE_DB_NAME=sourcedb
+SOURCE_DB_USERNAME=sourceuser
+SOURCE_DB_PASSWORD=password
+```
+
+```shell
+ docker run --rm --network your_network_name \
+ --env-file your-env \
+ -v $PWD/backup:/backup/ \
+ jkaninda/pg-bkup migrate -d database_name
+```
diff --git a/pkg/backup.go b/pkg/backup.go
index e6f2764..5dafd2f 100644
--- a/pkg/backup.go
+++ b/pkg/backup.go
@@ -17,11 +17,10 @@ import (
 )
 
 func StartBackup(cmd *cobra.Command) {
-	_, _ = cmd.Flags().GetString("operation")
 	//Set env
 	utils.SetEnv("STORAGE_PATH", storagePath)
-	utils.GetEnv(cmd, "dbname", "DB_NAME")
-	utils.GetEnv(cmd, "port", "DB_PORT")
+	//utils.GetEnv(cmd, "dbname", "DB_NAME")
+	//utils.GetEnv(cmd, "port", "DB_PORT")
 	utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
 
 	//Get flag value and set env
@@ -32,37 +31,38 @@ func StartBackup(cmd *cobra.Command) {
 	prune, _ := cmd.Flags().GetBool("prune")
 	disableCompression, _ = cmd.Flags().GetBool("disable-compression")
 	executionMode, _ = cmd.Flags().GetString("mode")
-	dbName = os.Getenv("DB_NAME")
 	gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
 	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
 
+	dbConf = getDbConfig(cmd)
+	//
 	if gpgPassphrase != "" {
 		encryption = true
 	}
 	//Generate file name
-	backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbName, time.Now().Format("20060102_150405"))
+	backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
 	if disableCompression {
-		backupFileName = fmt.Sprintf("%s_%s.sql", dbName, time.Now().Format("20060102_150405"))
+		backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
 	}
 	if executionMode == "default" {
 		switch storage {
 		case "s3":
-			s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
+			s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
 		case "local":
-			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
+			localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
 		case "ssh", "remote":
-			sshBackup(backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
+			sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
 		case "ftp":
 			utils.Fatal("Not supported storage type: %s", storage)
 		default:
-			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
+			localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
 		}
 	} else if executionMode == "scheduled" {
-		scheduledMode(storage)
+		scheduledMode(dbConf, storage)
 	} else {
 		utils.Fatal("Error, unknown execution mode!")
 	}
 
 }
 
 // Run in scheduled mode
-func 
scheduledMode(storage string) { +func scheduledMode(db *dbConfig, storage string) { fmt.Println() fmt.Println("**********************************") @@ -81,7 +81,7 @@ func scheduledMode(storage string) { utils.Info("Storage type %s ", storage) //Test database connexion - utils.TestDatabaseConnection() + testDatabaseConnection(db) utils.Info("Creating backup job...") CreateCrontabScript(disableCompression, storage) @@ -120,27 +120,17 @@ func scheduledMode(storage string) { } // BackupDatabase backup database -func BackupDatabase(backupFileName string, disableCompression bool) { - dbHost = os.Getenv("DB_HOST") - dbPassword = os.Getenv("DB_PASSWORD") - dbUserName = os.Getenv("DB_USERNAME") - dbName = os.Getenv("DB_NAME") - dbPort = os.Getenv("DB_PORT") +func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) { + storagePath = os.Getenv("STORAGE_PATH") utils.Info("Starting database backup...") - err := utils.CheckEnvVars(dbHVars) - if err != nil { - utils.Error("Please make sure all required environment variables for database are set") - utils.Fatal("Error checking environment variables: %s", err) - } - - err = os.Setenv("PGPASSWORD", dbPassword) + err := os.Setenv("PGPASSWORD", db.dbPassword) if err != nil { return } - utils.TestDatabaseConnection() + testDatabaseConnection(db) // Backup Database database utils.Info("Backing up database...") @@ -148,10 +138,10 @@ func BackupDatabase(backupFileName string, disableCompression bool) { if disableCompression { // Execute pg_dump cmd := exec.Command("pg_dump", - "-h", dbHost, - "-p", dbPort, - "-U", dbUserName, - "-d", dbName, + "-h", db.dbHost, + "-p", db.dbPort, + "-U", db.dbUserName, + "-d", db.dbName, ) output, err := cmd.Output() if err != nil { @@ -172,10 +162,10 @@ func BackupDatabase(backupFileName string, disableCompression bool) { } else { // Execute pg_dump cmd := exec.Command("pg_dump", - "-h", dbHost, - "-p", dbPort, - "-U", dbUserName, - "-d", dbName, + "-h", db.dbHost, + "-p", db.dbPort, + "-U", db.dbUserName, + "-d", db.dbName, ) stdout, err := cmd.StdoutPipe() if err != nil { @@ -200,9 +190,9 @@ func BackupDatabase(backupFileName string, disableCompression bool) { utils.Info("Database has been backed up") } -func localBackup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { +func localBackup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { utils.Info("Backup database to local storage") - BackupDatabase(backupFileName, disableCompression) + BackupDatabase(db, backupFileName, disableCompression) finalFileName := backupFileName if encrypt { encryptBackup(backupFileName) @@ -218,12 +208,12 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac deleteTemp() } -func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { +func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH") utils.Info("Backup database to s3 storage") //Backup database - BackupDatabase(backupFileName, disableCompression) + BackupDatabase(db, backupFileName, disableCompression) finalFileName := backupFileName if encrypt { encryptBackup(backupFileName) @@ -255,10 +245,10 @@ func s3Backup(backupFileName string, disableCompression bool, prune bool, backup //Delete temp 
deleteTemp() } -func sshBackup(backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { +func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { utils.Info("Backup database to Remote server") //Backup database - BackupDatabase(backupFileName, disableCompression) + BackupDatabase(db, backupFileName, disableCompression) finalFileName := backupFileName if encrypt { encryptBackup(backupFileName) diff --git a/pkg/config.go b/pkg/config.go index d0b5e01..5c83329 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -1,4 +1,59 @@ package pkg +import ( + "github.com/jkaninda/pg-bkup/utils" + "github.com/spf13/cobra" + "os" +) + type Config struct { } + +type dbConfig struct { + dbHost string + dbPort string + dbName string + dbUserName string + dbPassword string +} +type dbSourceConfig struct { + sourceDbHost string + sourceDbPort string + sourceDbUserName string + sourceDbPassword string + sourceDbName string +} + +func getDbConfig(cmd *cobra.Command) *dbConfig { + //Set env + utils.GetEnv(cmd, "dbname", "DB_NAME") + utils.GetEnv(cmd, "port", "DB_PORT") + dConf := dbConfig{} + dConf.dbHost = os.Getenv("DB_HOST") + dConf.dbPort = os.Getenv("DB_PORT") + dConf.dbName = os.Getenv("DB_NAME") + dConf.dbUserName = os.Getenv("DB_USERNAME") + dConf.dbPassword = os.Getenv("DB_PASSWORD") + + err := utils.CheckEnvVars(dbHVars) + if err != nil { + utils.Error("Please make sure all required environment variables for database are set") + utils.Fatal("Error checking environment variables: %s", err) + } + return &dConf +} +func getSourceDbConfig() *dbSourceConfig { + sdbConfig := dbSourceConfig{} + sdbConfig.sourceDbHost = os.Getenv("SOURCE_DB_HOST") + sdbConfig.sourceDbPort = os.Getenv("SOURCE_DB_PORT") + sdbConfig.sourceDbName = os.Getenv("SOURCE_DB_NAME") + sdbConfig.sourceDbUserName = os.Getenv("SOURCE_DB_USERNAME") + sdbConfig.sourceDbPassword = os.Getenv("SOURCE_DB_PASSWORD") + + err := utils.CheckEnvVars(sdbRVars) + if err != nil { + utils.Error("Please make sure all required environment variables for source database are set") + utils.Fatal("Error checking environment variables: %s", err) + } + return &sdbConfig +} diff --git a/pkg/helper.go b/pkg/helper.go index 65c4d39..be6ca9b 100644 --- a/pkg/helper.go +++ b/pkg/helper.go @@ -1,8 +1,10 @@ package pkg import ( + "bytes" "github.com/jkaninda/pg-bkup/utils" "os" + "os/exec" "path/filepath" "time" ) @@ -94,3 +96,38 @@ func deleteTemp() { utils.Info("Deleting %s ... 
done", tmpPath) } } + +// TestDatabaseConnection tests the database connection +func testDatabaseConnection(db *dbConfig) { + + utils.Info("Connecting to %s database ...", db.dbName) + // Test database connection + query := "SELECT version();" + + // Set the environment variable for the database password + err := os.Setenv("PGPASSWORD", db.dbPassword) + if err != nil { + return + } + // Prepare the psql command + cmd := exec.Command("psql", + "-U", db.dbUserName, // database user + "-d", db.dbName, // database name + "-h", db.dbHost, // host + "-p", db.dbPort, // port + "-c", query, // SQL command to execute + ) + // Capture the output + var out bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &out + + // Run the command and capture any errors + err = cmd.Run() + if err != nil { + utils.Error("Error running psql command: %v\nOutput: %s\n", err, out.String()) + return + } + utils.Info("Successfully connected to %s database", db.dbName) + +} diff --git a/pkg/migrate.go b/pkg/migrate.go new file mode 100644 index 0000000..bbc99b3 --- /dev/null +++ b/pkg/migrate.go @@ -0,0 +1,31 @@ +package pkg + +import ( + "fmt" + "github.com/jkaninda/pg-bkup/utils" + "github.com/spf13/cobra" + "time" +) + +func StartMigration(cmd *cobra.Command) { + utils.Info("Starting database migration...") + //Get DB config + dbConf = getDbConfig(cmd) + sDbConf = getSourceDbConfig() + + //Generate file name + backupFileName := fmt.Sprintf("%s_%s.sql", sDbConf.sourceDbName, time.Now().Format("20060102_150405")) + //Backup Source Database + newDbConfig := dbConfig{} + newDbConfig.dbHost = sDbConf.sourceDbHost + newDbConfig.dbPort = sDbConf.sourceDbPort + newDbConfig.dbName = sDbConf.sourceDbName + newDbConfig.dbUserName = sDbConf.sourceDbUserName + newDbConfig.dbPassword = sDbConf.sourceDbPassword + BackupDatabase(&newDbConfig, backupFileName, true) + //Restore source database into target database + utils.Info("Restoring [%s] database into [%s] database...", sDbConf.sourceDbName, dbConf.dbName) + RestoreDatabase(dbConf, backupFileName) + utils.Info("[%s] database has been restored into [%s] database", sDbConf.sourceDbName, dbConf.dbName) + utils.Info("Database migration completed!") +} diff --git a/pkg/restore.go b/pkg/restore.go index 07b62c6..9175c29 100644 --- a/pkg/restore.go +++ b/pkg/restore.go @@ -13,8 +13,6 @@ func StartRestore(cmd *cobra.Command) { //Set env utils.SetEnv("STORAGE_PATH", storagePath) - utils.GetEnv(cmd, "dbname", "DB_NAME") - utils.GetEnv(cmd, "port", "DB_PORT") //Get flag value and set env s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") @@ -23,47 +21,46 @@ func StartRestore(cmd *cobra.Command) { file = utils.GetEnv(cmd, "file", "FILE_NAME") executionMode, _ = cmd.Flags().GetString("mode") bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") + + dbConf = getDbConfig(cmd) + switch storage { case "s3": - restoreFromS3(file, bucket, s3Path) + restoreFromS3(dbConf, file, bucket, s3Path) case "local": utils.Info("Restore database from local") copyToTmp(storagePath, file) - RestoreDatabase(file) + RestoreDatabase(dbConf, file) case "ssh": - restoreFromRemote(file, remotePath) + restoreFromRemote(dbConf, file, remotePath) case "ftp": utils.Fatal("Restore from FTP is not yet supported") default: utils.Info("Restore database from local") - RestoreDatabase(file) + copyToTmp(storagePath, file) + RestoreDatabase(dbConf, file) } } -func restoreFromS3(file, bucket, s3Path string) { +func restoreFromS3(db *dbConfig, file, bucket, s3Path string) { utils.Info("Restore database from s3") err := 
utils.DownloadFile(tmpPath, file, bucket, s3Path) if err != nil { utils.Fatal("Error download file from s3 %s %v ", file, err) } - RestoreDatabase(file) + RestoreDatabase(db, file) } -func restoreFromRemote(file, remotePath string) { +func restoreFromRemote(db *dbConfig, file, remotePath string) { utils.Info("Restore database from remote server") err := CopyFromRemote(file, remotePath) if err != nil { utils.Fatal("Error download file from remote server: %s %v", filepath.Join(remotePath, file), err) } - RestoreDatabase(file) + RestoreDatabase(db, file) } // RestoreDatabase restore database -func RestoreDatabase(file string) { - dbHost = os.Getenv("DB_HOST") - dbPassword = os.Getenv("DB_PASSWORD") - dbUserName = os.Getenv("DB_USERNAME") - dbName = os.Getenv("DB_NAME") - dbPort = os.Getenv("DB_PORT") +func RestoreDatabase(db *dbConfig, file string) { gpgPassphrase := os.Getenv("GPG_PASSPHRASE") if file == "" { utils.Fatal("Error, file required") @@ -93,17 +90,17 @@ func RestoreDatabase(file string) { if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) { - err := os.Setenv("PGPASSWORD", dbPassword) + err := os.Setenv("PGPASSWORD", db.dbPassword) if err != nil { return } - utils.TestDatabaseConnection() + testDatabaseConnection(db) utils.Info("Restoring database...") extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file)) // Restore from compressed file / .sql.gz if extension == ".gz" { - str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME") + str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName _, err := exec.Command("bash", "-c", str).Output() if err != nil { utils.Fatal("Error, in restoring the database %v", err) @@ -115,7 +112,7 @@ func RestoreDatabase(file string) { } else if extension == ".sql" { //Restore from sql file - str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME") + str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName _, err := exec.Command("bash", "-c", str).Output() if err != nil { utils.Fatal("Error in restoring the database %v", err) diff --git a/pkg/var.go b/pkg/var.go index f415d89..70eb102 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -7,13 +7,13 @@ const algorithm = "aes256" const gpgExtension = "gpg" var ( - storage = "local" - file = "" - dbPassword = "" - dbUserName = "" - dbName = "" - dbHost = "" - dbPort = "5432" + storage = "local" + file = "" + //dbPassword = "" + //dbUserName = "" + //dbName = "" + //dbHost = "" + //dbPort = "5432" executionMode = "default" storagePath = "/backup" disableCompression = false @@ -27,6 +27,16 @@ var dbHVars = []string{ "DB_USERNAME", "DB_NAME", } +var sdbRVars = []string{ + "SOURCE_DB_HOST", + "SOURCE_DB_PORT", + "SOURCE_DB_NAME", + "SOURCE_DB_USERNAME", + "SOURCE_DB_PASSWORD", +} + +var dbConf *dbConfig +var sDbConf *dbSourceConfig // sshVars Required environment variables for SSH remote server storage var sshVars = []string{ diff --git a/utils/utils.go b/utils/utils.go index f208b2f..0a3e88c 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -1,19 +1,17 @@ package utils /***** -* MySQL Backup & Restore +* PostgreSQL Backup & Restore * @author Jonas 
Kaninda
 * @license MIT License
-* @link https://github.com/jkaninda/mysql-bkup
+* @link https://github.com/jkaninda/pg-bkup
 **/
 import (
-	"bytes"
 	"fmt"
 	"github.com/spf13/cobra"
 	"io"
 	"io/fs"
 	"os"
-	"os/exec"
 )
 
 func FileExists(filename string) bool {
@@ -90,49 +88,6 @@ func IsDirEmpty(name string) (bool, error) {
 	return true, nil
 }
 
-// TestDatabaseConnection tests the database connection
-func TestDatabaseConnection() {
-	dbHost := os.Getenv("DB_HOST")
-	dbPassword := os.Getenv("DB_PASSWORD")
-	dbUserName := os.Getenv("DB_USERNAME")
-	dbName := os.Getenv("DB_NAME")
-	dbPort := os.Getenv("DB_PORT")
-
-	if os.Getenv("DB_HOST") == "" || os.Getenv("DB_NAME") == "" || os.Getenv("DB_USERNAME") == "" || os.Getenv("DB_PASSWORD") == "" {
-		Fatal("Please make sure all required database environment variables are set")
-	} else {
-		Info("Connecting to database ...")
-		// Test database connection
-		query := "SELECT version();"
-
-		// Set the environment variable for the database password
-		err := os.Setenv("PGPASSWORD", dbPassword)
-		if err != nil {
-			return
-		}
-		// Prepare the psql command
-		cmd := exec.Command("psql",
-			"-U", dbUserName, // database user
-			"-d", dbName, // database name
-			"-h", dbHost, // host
-			"-p", dbPort, // port
-			"-c", query, // SQL command to execute
-		)
-		// Capture the output
-		var out bytes.Buffer
-		cmd.Stdout = &out
-		cmd.Stderr = &out
-
-		// Run the command and capture any errors
-		err = cmd.Run()
-		if err != nil {
-			Error("Error running psql command: %v\nOutput: %s\n", err, out.String())
-			return
-		}
-		Info("Successfully connected to database")
-
-	}
-}
 func GetEnv(cmd *cobra.Command, flagName, envName string) string {
 	value, _ := cmd.Flags().GetString(flagName)
 	if value != "" {

From a3322034f4f891a260856e8d4e83315591ef7c2a Mon Sep 17 00:00:00 2001
From: Jonas Kaninda
Date: Thu, 29 Aug 2024 21:53:08 +0200
Subject: [PATCH 2/5] docs: update migrate note

---
 docs/how-tos/migrate.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/how-tos/migrate.md b/docs/how-tos/migrate.md
index a624c95..be72f20 100644
--- a/docs/how-tos/migrate.md
+++ b/docs/how-tos/migrate.md
@@ -12,8 +12,8 @@ To migrate a database, use the `migrate` command.
 {: .note }
 pg-bkup has another great feature: migrating your database from a source database to another.
 
-As you know, to restore a database from a source to a target database, you need 2 operations: to start by backing up the source database and then restoring the source backed database to the target database.
-Instead of proceeding like that, you can use the integrated feature `(migrate)` that will help you to migrate your database by doing only one operation.
+As you know, restoring a database from a source to a target normally takes two operations: backing up the source database, then restoring the backup into the target database.
+Instead of proceeding like that, you can use the integrated `migrate` feature, which performs the whole migration in a single operation.
### Docker compose From 13237ad6342a11f83572338b9a51fc790e5dee22 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda <> Date: Fri, 30 Aug 2024 09:20:14 +0200 Subject: [PATCH 3/5] Refactoring of Dockerfile --- docker/Dockerfile | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 59f5053..44f3b23 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -39,7 +39,7 @@ ARG DEBIAN_FRONTEND=noninteractive ENV VERSION="v1.2.3" ENV BACKUP_CRON_EXPRESSION="" ENV GNUPGHOME="/tmp/gnupg" -ARG WORKDIR="/app" +ARG WORKDIR="/config" ARG BACKUPDIR="/backup" ARG BACKUP_TMP_DIR="/tmp/backup" ARG BACKUP_CRON="/etc/cron.d/backup_cron" @@ -74,18 +74,14 @@ RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup ADD docker/supervisord.conf /etc/supervisor/supervisord.conf WORKDIR $WORKDIR -# Create backup shell script -COPY < /usr/local/bin/backup && \ + chmod +x /usr/local/bin/backup + +# Create the restore script and make it executable +RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup restore "$@"' > /usr/local/bin/restore && \ chmod +x /usr/local/bin/restore +# Create the migrate script and make it executable +RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup migrate "$@"' > /usr/local/bin/migrate && \ + chmod +x /usr/local/bin/migrate ENTRYPOINT ["/usr/local/bin/pg-bkup"] From 3d7f1cdd3bedf5e0d95b58eb6dada180bcbff1d1 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda <> Date: Fri, 30 Aug 2024 13:47:50 +0200 Subject: [PATCH 4/5] fix: gpg encrypt permission warning message, update Kubernetes deployment example --- README.md | 40 +++++++++-------------- docker/Dockerfile | 14 ++++----- docs/how-tos/deploy-on-kubernetes.md | 26 +++------------ docs/how-tos/migrate.md | 47 ++++++++++++++++++++++++++++ docs/index.md | 42 +++++++++++++++++++++++++ pkg/encrypt.go | 16 ++++++++-- pkg/var.go | 10 ++---- utils/utils.go | 18 +++++++++++ 8 files changed, 150 insertions(+), 63 deletions(-) diff --git a/README.md b/README.md index 2f40b98..1d62811 100644 --- a/README.md +++ b/README.md @@ -95,8 +95,9 @@ For Kubernetes, you don't need to run it in scheduled mode. You can deploy it as apiVersion: batch/v1 kind: Job metadata: - name: backup + name: backup-job spec: + ttlSecondsAfterFinished: 100 template: spec: containers: @@ -109,38 +110,27 @@ spec: command: - /bin/sh - -c - - bkup - - backup - - --storage - - s3 + - backup -d dbname resources: limits: memory: "128Mi" cpu: "500m" env: - - name: DB_PORT - value: "5432" - name: DB_HOST - value: "" - - name: DB_NAME - value: "" + value: "postgres" - name: DB_USERNAME - value: "" - # Please use secret! 
+ value: "postgres" - name: DB_PASSWORD - value: "" - - name: AWS_S3_ENDPOINT - value: "https://s3.amazonaws.com" - - name: AWS_S3_BUCKET_NAME - value: "xxx" - - name: AWS_REGION - value: "us-west-2" - - name: AWS_ACCESS_KEY - value: "xxxx" - - name: AWS_SECRET_KEY - value: "xxxx" - - name: AWS_DISABLE_SSL - value: "false" + value: "password" + volumeMounts: + - mountPath: /backup + name: backup + volumes: + - name: backup + hostPath: + path: /home/toto/backup # directory location on host + type: Directory # this field is optional + restartPolicy: Never ``` ## Available image registries diff --git a/docker/Dockerfile b/docker/Dockerfile index 44f3b23..0c9cb0e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -38,7 +38,6 @@ ENV SOURCE_DB_PASSWORD="" ARG DEBIAN_FRONTEND=noninteractive ENV VERSION="v1.2.3" ENV BACKUP_CRON_EXPRESSION="" -ENV GNUPGHOME="/tmp/gnupg" ARG WORKDIR="/config" ARG BACKUPDIR="/backup" ARG BACKUP_TMP_DIR="/tmp/backup" @@ -55,25 +54,21 @@ RUN apt-get clean && rm -rf /var/lib/apt/lists/* RUN mkdir $WORKDIR RUN mkdir $BACKUPDIR -RUN mkdir -p $BACKUP_TMP_DIR && \ - mkdir -p $GNUPGHOME +RUN mkdir -p $BACKUP_TMP_DIR RUN chmod 777 $WORKDIR RUN chmod 777 $BACKUPDIR RUN chmod 777 $BACKUP_TMP_DIR RUN touch $BACKUP_CRON && \ touch $BACKUP_CRON_SCRIPT && \ + chmod 777 $WORKDIR && \ chmod 777 $BACKUP_CRON && \ - chmod 777 $BACKUP_CRON_SCRIPT && \ - chmod 777 $GNUPGHOME - + chmod 777 $BACKUP_CRON_SCRIPT COPY --from=build /app/pg-bkup /usr/local/bin/pg-bkup RUN chmod +x /usr/local/bin/pg-bkup RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup ADD docker/supervisord.conf /etc/supervisor/supervisord.conf - -WORKDIR $WORKDIR # Create the backup script and make it executable RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup backup "$@"' > /usr/local/bin/backup && \ chmod +x /usr/local/bin/backup @@ -84,4 +79,7 @@ RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup restore "$@"' > /usr/local/bin/resto # Create the migrate script and make it executable RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup migrate "$@"' > /usr/local/bin/migrate && \ chmod +x /usr/local/bin/migrate + +WORKDIR $WORKDIR ENTRYPOINT ["/usr/local/bin/pg-bkup"] + diff --git a/docs/how-tos/deploy-on-kubernetes.md b/docs/how-tos/deploy-on-kubernetes.md index f9f7f89..244873d 100644 --- a/docs/how-tos/deploy-on-kubernetes.md +++ b/docs/how-tos/deploy-on-kubernetes.md @@ -30,10 +30,7 @@ spec: command: - /bin/sh - -c - - bkup - - backup - - --storage - - s3 + - backup --storage s3 resources: limits: memory: "128Mi" @@ -87,10 +84,7 @@ spec: - /bin/sh - -c - bkup - - backup - - --storage - - ssh - - --disable-compression + - backup --storage ssh --disable-compression resources: limits: memory: "128Mi" @@ -145,10 +139,7 @@ spec: - /bin/sh - -c - bkup - - restore - - --storage - - ssh - - --file store_20231219_022941.sql.gz + - restore --storage ssh --file store_20231219_022941.sql.gz resources: limits: memory: "128Mi" @@ -205,10 +196,7 @@ spec: - /bin/sh - -c - bkup - - backup - - --storage - - ssh - - --disable-compression + - backup --storage ssh --disable-compression resources: limits: memory: "128Mi" @@ -272,11 +260,7 @@ spec: command: - /bin/sh - -c - - bkup - - backup - - --storage - - ssh - - --disable-compression + - backup --storage ssh --disable-compression resources: limits: memory: "128Mi" diff --git a/docs/how-tos/migrate.md b/docs/how-tos/migrate.md index be72f20..d159fa9 100644 --- a/docs/how-tos/migrate.md +++ b/docs/how-tos/migrate.md @@ -73,3 +73,50 @@ SOURCE_DB_PASSWORD=password -v $PWD/backup:/backup/ \ 
jkaninda/pg-bkup migrate -d database_name ``` + +## Kubernetes + +```yaml +apiVersion: batch/v1 +kind: Job +metadata: + name: migrate-db +spec: + ttlSecondsAfterFinished: 100 + template: + spec: + containers: + - name: pg-bkup + # In production, it is advised to lock your image tag to a proper + # release version instead of using `latest`. + # Check https://github.com/jkaninda/pg-bkup/releases + # for a list of available releases. + image: jkaninda/pg-bkup + command: + - /bin/sh + - -c + - migrate -d targetdb + resources: + limits: + memory: "128Mi" + cpu: "500m" + env: + ## Target DB + - name: DB_HOST + value: "postgres-target" + - name: DB_USERNAME + value: "postgres" + - name: DB_PASSWORD + value: "password" + ## Source DB + - name: SOURCE_DB_HOST + value: "postgres-source" + - name: SOURCE_DB_NAME + value: "sourcedb" + - name: SOURCE_DB_USERNAME + value: "postgres" + # Please use secret! + - name: SOURCE_DB_PASSWORD + value: "password" + restartPolicy: Never +``` \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 4d0cb94..5ad1355 100644 --- a/docs/index.md +++ b/docs/index.md @@ -78,7 +78,49 @@ services: networks: web: ``` +## Kubernetes +```yaml +apiVersion: batch/v1 +kind: Job +metadata: + name: backup-job +spec: + ttlSecondsAfterFinished: 100 + template: + spec: + containers: + - name: pg-bkup + # In production, it is advised to lock your image tag to a proper + # release version instead of using `latest`. + # Check https://github.com/jkaninda/pg-bkup/releases + # for a list of available releases. + image: jkaninda/pg-bkup + command: + - /bin/sh + - -c + - backup -d dbname + resources: + limits: + memory: "128Mi" + cpu: "500m" + env: + - name: DB_HOST + value: "postgres" + - name: DB_USERNAME + value: "postgres" + - name: DB_PASSWORD + value: "password" + volumeMounts: + - mountPath: /backup + name: backup + volumes: + - name: backup + hostPath: + path: /home/toto/backup # directory location on host + type: Directory # this field is optional + restartPolicy: Never +``` ## Available image registries This Docker image is published to both Docker Hub and the GitHub container registry. 
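
For readers who want to see what the new `migrate` command condenses, it is the existing backup and restore primitives chained together: `pkg/migrate.go` dumps the source database with compression disabled, then restores the resulting `.sql` file into the target. A minimal manual equivalent, sketched here with the standard PostgreSQL client tools that pg-bkup itself shells out to (`pg_dump` and `psql`); every host name, user, password, and database name below is a placeholder, not a value taken from this patch series:

```shell
# Illustrative manual equivalent of `pg-bkup migrate`; all names are
# placeholders.
# 1. Dump the source database to a plain .sql file (the migrate code
#    disables compression internally for this step).
PGPASSWORD=source_password pg_dump -h source-host -p 5432 \
  -U source_user -d sourcedb > /tmp/backup/sourcedb.sql

# 2. Replay the dump into the target database.
PGPASSWORD=target_password psql -h target-host -p 5432 \
  -U target_user -d targetdb < /tmp/backup/sourcedb.sql
```

The built-in command adds what this sketch lacks: connection tests against both databases, validation of the required environment variables, and a single container performing both steps.
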
diff --git a/pkg/encrypt.go b/pkg/encrypt.go
index 017e6b0..00127e7 100644
--- a/pkg/encrypt.go
+++ b/pkg/encrypt.go
@@ -9,11 +9,17 @@ import (
 
 func Decrypt(inputFile string, passphrase string) error {
 	utils.Info("Decrypting backup file: %s...", inputFile)
+	//Create gpg home dir
+	err := utils.MakeDir(gpgHome)
+	if err != nil {
+		return err
+	}
+	utils.SetEnv("GNUPGHOME", gpgHome)
 	cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--output", RemoveLastExtension(inputFile), "--decrypt", inputFile)
 	cmd.Stdout = os.Stdout
 	cmd.Stderr = os.Stderr
 
-	err := cmd.Run()
+	err = cmd.Run()
 	if err != nil {
 		return err
 	}
@@ -24,11 +30,17 @@ func Decrypt(inputFile string, passphrase string) error {
 
 func Encrypt(inputFile string, passphrase string) error {
 	utils.Info("Encrypting backup...")
+	//Create gpg home dir
+	err := utils.MakeDir(gpgHome)
+	if err != nil {
+		return err
+	}
+	utils.SetEnv("GNUPGHOME", gpgHome)
 	cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--symmetric", "--cipher-algo", algorithm, inputFile)
 	cmd.Stdout = os.Stdout
 	cmd.Stderr = os.Stderr
 
-	err := cmd.Run()
+	err = cmd.Run()
 	if err != nil {
 		return err
 	}
diff --git a/pkg/var.go b/pkg/var.go
index 70eb102..2f159c6 100644
--- a/pkg/var.go
+++ b/pkg/var.go
@@ -3,17 +3,13 @@ package pkg
 const cronLogFile = "/var/log/pg-bkup.log"
 const tmpPath = "/tmp/backup"
 const backupCronFile = "/usr/local/bin/backup_cron.sh"
+const gpgHome = "gnupg"
 const algorithm = "aes256"
 const gpgExtension = "gpg"
 
 var (
-	storage = "local"
-	file    = ""
-	//dbPassword = ""
-	//dbUserName = ""
-	//dbName = ""
-	//dbHost = ""
-	//dbPort = "5432"
+	storage            = "local"
+	file               = ""
 	executionMode      = "default"
 	storagePath        = "/backup"
 	disableCompression = false
diff --git a/utils/utils.go b/utils/utils.go
index 0a3e88c..a08fb0d 100644
--- a/utils/utils.go
+++ b/utils/utils.go
@@ -149,3 +149,21 @@ func CheckEnvVars(vars []string) error {
 
 	return nil
 }
+
+// MakeDir creates a directory
+func MakeDir(dirPath string) error {
+	err := os.Mkdir(dirPath, 0700)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// MakeDirAll creates a directory along with any missing parent directories
+func MakeDirAll(dirPath string) error {
+	err := os.MkdirAll(dirPath, 0700)
+	if err != nil {
+		return err
+	}
+	return nil
+}

From c5ad456688ad634cd0c3efebacaf6dd130d79bec Mon Sep 17 00:00:00 2001
From: Jonas Kaninda <>
Date: Fri, 30 Aug 2024 18:54:23 +0200
Subject: [PATCH 5/5] docs: update configuration reference, update Kubernetes
 deployment

---
 docs/reference/index.md | 51 ++++++++++++++++++++++++++----------------
 examples/k8s-job.yaml   |  5 +----
 2 files changed, 30 insertions(+), 26 deletions(-)

diff --git a/docs/reference/index.md b/docs/reference/index.md
index 38e931f..63b16e1 100644
--- a/docs/reference/index.md
+++ b/docs/reference/index.md
@@ -6,7 +6,7 @@ nav_order: 2
 
 # Configuration reference
 
-Backup and restore targets, schedule and retention are configured using environment variables or flags.
+Backup, restore and migrate targets, schedule and retention are configured using environment variables or flags.
 
 
 
@@ -19,6 +19,7 @@
 | pg-bkup   | bkup | CLI utility                                                                            |
 | backup    |      | Backup database operation                                                              |
 | restore   |      | Restore database operation                                                             |
+| migrate   |      | Migrate a database from one instance to another                                        |
 | --storage | -s   | Storage. local or s3 (default: local)                                                  |
 | --file    | -f   | File name for restoration                                                              |
 | --path    |      | AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup` |
 
 
 ## Environment variables
 
-| Name              | Requirement                                      | Description                                           |
-|-------------------|--------------------------------------------------|-------------------------------------------------------|
-| DB_PORT           | Optional, default 5432                           | Database port number                                  |
-| DB_HOST           | Required                                         | Database host                                         |
-| DB_NAME           | Optional if it was provided from the -d flag     | Database name                                         |
-| DB_USERNAME       | Required                                         | Database user name                                    |
-| DB_PASSWORD       | Required                                         | Database password                                     |
-| AWS_ACCESS_KEY    | Optional, required for S3 storage                | AWS S3 Access Key                                     |
-| AWS_SECRET_KEY    | Optional, required for S3 storage                | AWS S3 Secret Key                                     |
-| AWS_BUCKET_NAME   | Optional, required for S3 storage                | AWS S3 Bucket Name                                    |
-| AWS_BUCKET_NAME   | Optional, required for S3 storage                | AWS S3 Bucket Name                                    |
-| AWS_REGION        | Optional, required for S3 storage                | AWS Region                                            |
-| AWS_DISABLE_SSL   | Optional, required for S3 storage                | Disable SSL                                           |
-| FILE_NAME         | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz)  |
-| GPG_PASSPHRASE    | Optional, required to encrypt and restore backup | GPG passphrase                                        |
-| SSH_HOST_NAME     | Optional, required for SSH storage               | ssh remote hostname or ip                             |
-| SSH_USER          | Optional, required for SSH storage               | ssh remote user                                       |
-| SSH_PASSWORD      | Optional, required for SSH storage               | ssh remote user's password                            |
-| SSH_IDENTIFY_FILE | Optional, required for SSH storage               | ssh remote user's private key                         |
-| SSH_PORT          | Optional, required for SSH storage               | ssh remote server port                                |
-| SSH_REMOTE_PATH   | Optional, required for SSH storage               | ssh remote path (/home/toto/backup)                   |
+| Name                   | Requirement                                         | Description                                           |
+|------------------------|-----------------------------------------------------|-------------------------------------------------------|
+| DB_PORT                | Optional, default 5432                              | Database port number                                  |
+| DB_HOST                | Required                                            | Database host                                         |
+| DB_NAME                | Optional if it was provided from the -d flag        | Database name                                         |
+| DB_USERNAME            | Required                                            | Database user name                                    |
+| DB_PASSWORD            | Required                                            | Database password                                     |
+| AWS_ACCESS_KEY         | Optional, required for S3 storage                   | AWS S3 Access Key                                     |
+| AWS_SECRET_KEY         | Optional, required for S3 storage                   | AWS S3 Secret Key                                     |
+| AWS_BUCKET_NAME        | Optional, required for S3 storage                   | AWS S3 Bucket Name                                    |
+| AWS_REGION             | Optional, required for S3 storage                   | AWS Region                                            |
+| AWS_DISABLE_SSL        | Optional, required for S3 storage                   | Disable SSL                                           |
+| FILE_NAME              | Optional if it was provided from the --file flag    | Database file to restore (extensions: .sql, .sql.gz)  |
+| GPG_PASSPHRASE         | Optional, required to encrypt and restore backup    | GPG passphrase                                        |
+| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --period flag  | Backup cron expression for docker in scheduled mode   |
+| SSH_HOST_NAME          | Optional, required for SSH storage                  | ssh remote hostname or ip                             |
+| SSH_USER               | Optional, required for SSH storage                  | ssh remote user                                       |
+| SSH_PASSWORD           | Optional, required for SSH storage                  | ssh remote user's password                            |
+| SSH_IDENTIFY_FILE      | Optional, required for SSH storage                  | ssh remote user's private key                         |
+| SSH_PORT               | Optional, required for SSH storage                  | ssh remote server port                                |
+| SSH_REMOTE_PATH        | Optional, required for SSH storage                  | ssh remote path (/home/toto/backup)                   |
+| SOURCE_DB_HOST         | Optional, required for database migration           | Source database host                                  |
+| SOURCE_DB_PORT         | Optional, required for database migration           | Source database port                                  |
+| SOURCE_DB_NAME         | Optional, required for database migration           | Source database name                                  |
+| SOURCE_DB_USERNAME     | Optional, required for database migration           | Source database username                              |
+| SOURCE_DB_PASSWORD     | Optional, required for database migration           | Source database password                              |
+
 ---
 ## Run in Scheduled mode
diff --git a/examples/k8s-job.yaml b/examples/k8s-job.yaml
index 9f2650d..cdf766f 100644
--- a/examples/k8s-job.yaml
+++ b/examples/k8s-job.yaml
@@ -15,10 +15,7 @@ spec:
       command:
         - /bin/sh
         - -c
-        - bkup
-        - backup
-        - --storage
-        - s3
+        - backup --storage s3
       resources:
         limits:
           memory: "128Mi"
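
To try the updated Kubernetes example, a typical workflow is to apply the Job manifest and follow its logs. This is an illustrative sketch: the Job name `backup-job` is an assumption based on the README and docs examples in this series, not output of the patches themselves:

```shell
# Apply the example backup Job and watch it run. The Job name is an
# assumption taken from the README/docs examples above.
kubectl apply -f examples/k8s-job.yaml
kubectl logs -f job/backup-job

# Jobs that set ttlSecondsAfterFinished are garbage-collected on their
# own after completion; otherwise remove the Job manually.
kubectl delete job backup-job
```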