From 29a58aa26d31fc28ea8608767edbc6f14bb1419c Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 04:45:03 +0200 Subject: [PATCH 1/7] chore: add cron expression verification --- cmd/backup.go | 2 +- go.mod | 1 + go.sum | 2 ++ pkg/backup.go | 19 ++++++++++++++----- utils/utils.go | 9 +++++++-- 5 files changed, 25 insertions(+), 8 deletions(-) diff --git a/cmd/backup.go b/cmd/backup.go index ad19ba0..0eeff19 100644 --- a/cmd/backup.go +++ b/cmd/backup.go @@ -30,7 +30,7 @@ func init() { BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3") BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled") - BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time") + BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time") BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled") BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days") BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression") diff --git a/go.mod b/go.mod index 2ef1395..2e1e200 100644 --- a/go.mod +++ b/go.mod @@ -15,6 +15,7 @@ require ( require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/robfig/cron/v3 v3.0.1 // indirect golang.org/x/sys v0.22.0 // indirect gopkg.in/fsnotify.v1 v1.4.7 // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect diff --git a/go.sum b/go.sum index d9de32f..375550e 100644 --- a/go.sum +++ b/go.sum @@ -15,6 +15,8 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y github.com/jmespath/go-jmespath v0.4.0/go.mod 
h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= diff --git a/pkg/backup.go b/pkg/backup.go index e6d34df..96fe23f 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -34,6 +34,7 @@ func StartBackup(cmd *cobra.Command) { executionMode, _ = cmd.Flags().GetString("mode") gpqPassphrase := os.Getenv("GPG_PASSPHRASE") _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") + cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION") dbConf = getDbConfig(cmd) @@ -48,7 +49,7 @@ func StartBackup(cmd *cobra.Command) { backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405")) } - if executionMode == "default" { + if cronExpression == "" { switch storage { case "s3": s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) @@ -62,10 +63,12 @@ func StartBackup(cmd *cobra.Command) { localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) } - } else if executionMode == "scheduled" { - scheduledMode(dbConf, storage) } else { - utils.Fatal("Error, unknown execution mode!") + if utils.IsValidCronExpression(cronExpression) { + scheduledMode(dbConf, storage) + } else { + utils.Fatal("Cron expression is not valid: %s", cronExpression) + } } } @@ -87,11 +90,17 @@ func scheduledMode(db *dbConfig, storage string) { utils.Info("Creating backup job...") 
CreateCrontabScript(disableCompression, storage) + //Set BACKUP_CRON_EXPRESSION to nil + err := os.Setenv("BACKUP_CRON_EXPRESSION", "") + if err != nil { + return + } + supervisorConfig := "/etc/supervisor/supervisord.conf" // Start Supervisor cmd := exec.Command("supervisord", "-c", supervisorConfig) - err := cmd.Start() + err = cmd.Start() if err != nil { utils.Fatal(fmt.Sprintf("Failed to start supervisord: %v", err)) } diff --git a/utils/utils.go b/utils/utils.go index 548bdc7..caf07ed 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -10,6 +10,7 @@ import ( "bytes" "encoding/json" "fmt" + "github.com/robfig/cron/v3" "github.com/spf13/cobra" "io" "io/fs" @@ -224,7 +225,7 @@ func NotifySuccess(fileName string) { //Telegram notification err := CheckEnvVars(vars) if err == nil { - message := "MySQL Backup \n" + + message := "[✅ MySQL Backup ]\n" + "Database has been backed up \n" + "Backup name is " + fileName sendMessage(message) @@ -239,7 +240,7 @@ func NotifyError(error string) { //Telegram notification err := CheckEnvVars(vars) if err == nil { - message := "MySQL Backup \n" + + message := "[🔴MySQL Backup ]\n" + "An error occurred during database backup \n" + "Error: " + error sendMessage(message) @@ -250,3 +251,7 @@ func getTgUrl() string { return fmt.Sprintf("https://api.telegram.org/bot%s", os.Getenv("TG_TOKEN")) } +func IsValidCronExpression(cronExpr string) bool { + _, err := cron.ParseStandard(cronExpr) + return err == nil +} From cbb73ae89b8395d6637c6b88d44b4503f2891bc0 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 07:26:33 +0200 Subject: [PATCH 2/7] chore: migrate backup scheduled mode from linux cron to go cron --- cmd/backup.go | 5 +- pkg/backup.go | 130 ++++++++++++++----------------------------------- pkg/config.go | 85 +++++++++++++++++++++++++++++++- pkg/helper.go | 4 ++ pkg/migrate.go | 4 +- pkg/restore.go | 25 +++------- pkg/var.go | 1 - 7 files changed, 137 insertions(+), 117 deletions(-) diff --git a/cmd/backup.go 
b/cmd/backup.go index 0eeff19..e1647b7 100644 --- a/cmd/backup.go +++ b/cmd/backup.go @@ -29,8 +29,9 @@ func init() { //Backup BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3") BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") - BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled") - BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time") + BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. | Deprecated") + BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated") + BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression") BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled") BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days") BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression") diff --git a/pkg/backup.go b/pkg/backup.go index 96fe23f..f7bb3c0 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -8,126 +8,70 @@ package pkg import ( "fmt" - "github.com/hpcloud/tail" "github.com/jkaninda/mysql-bkup/utils" + "github.com/robfig/cron/v3" "github.com/spf13/cobra" "log" "os" "os/exec" "path/filepath" - "time" ) func StartBackup(cmd *cobra.Command) { intro() - //Set env - utils.SetEnv("STORAGE_PATH", storagePath) - utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") - - //Get flag value and set env - remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") - storage = utils.GetEnv(cmd, "storage", "STORAGE") - file = utils.GetEnv(cmd, "file", "FILE_NAME") - backupRetention, _ := cmd.Flags().GetInt("keep-last") - prune, _ := cmd.Flags().GetBool("prune") - disableCompression, _ = cmd.Flags().GetBool("disable-compression") - executionMode, _ = 
cmd.Flags().GetString("mode") - gpqPassphrase := os.Getenv("GPG_PASSPHRASE") - _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") - cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION") - - dbConf = getDbConfig(cmd) - - // - if gpqPassphrase != "" { - encryption = true - } - - //Generate file name - backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405")) - if disableCompression { - backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405")) - } - - if cronExpression == "" { - switch storage { - case "s3": - s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) - case "local": - localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) - case "ssh", "remote": - sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption) - case "ftp": - utils.Fatal("Not supported storage type: %s", storage) - default: - localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) - } + dbConf = initDbConfig(cmd) + //Initialize backup configs + config := initBackupConfig(cmd) + if config.cronExpression == "" { + BackupTask(dbConf, config) } else { - if utils.IsValidCronExpression(cronExpression) { - scheduledMode(dbConf, storage) + if utils.IsValidCronExpression(config.cronExpression) { + scheduledMode(dbConf, config) } else { - utils.Fatal("Cron expression is not valid: %s", cronExpression) + utils.Fatal("Cron expression is not valid: %s", config.cronExpression) } } } // Run in scheduled mode -func scheduledMode(db *dbConfig, storage string) { - - fmt.Println() - fmt.Println("**********************************") - fmt.Println(" Starting MySQL Bkup... 
") - fmt.Println("***********************************") +func scheduledMode(db *dbConfig, config *BackupConfig) { utils.Info("Running in Scheduled mode") - utils.Info("Execution period %s", os.Getenv("BACKUP_CRON_EXPRESSION")) + utils.Info("Backup cron expression: %s", os.Getenv("BACKUP_CRON_EXPRESSION")) utils.Info("Storage type %s ", storage) //Test database connexion testDatabaseConnection(db) - utils.Info("Creating backup job...") - CreateCrontabScript(disableCompression, storage) + utils.Info("Creating a new cron instance...") + // Create a new cron instance + c := cron.New() - //Set BACKUP_CRON_EXPRESSION to nil - err := os.Setenv("BACKUP_CRON_EXPRESSION", "") - if err != nil { - return - } - - supervisorConfig := "/etc/supervisor/supervisord.conf" - - // Start Supervisor - cmd := exec.Command("supervisord", "-c", supervisorConfig) - err = cmd.Start() - if err != nil { - utils.Fatal(fmt.Sprintf("Failed to start supervisord: %v", err)) - } - utils.Info("Backup job started") - defer func() { - if err := cmd.Process.Kill(); err != nil { - utils.Info("Failed to kill supervisord process: %v", err) - } else { - utils.Info("Supervisor stopped.") - } - }() - if _, err := os.Stat(cronLogFile); os.IsNotExist(err) { - utils.Fatal(fmt.Sprintf("Log file %s does not exist.", cronLogFile)) - } - t, err := tail.TailFile(cronLogFile, tail.Config{Follow: true}) - if err != nil { - utils.Fatal("Failed to tail file: %v", err) - } - - // Read and print new lines from the log file - for line := range t.Lines { - fmt.Println(line.Text) - } + // Add a cron job that runs every 10 seconds + c.AddFunc(config.cronExpression, func() { + BackupTask(db, config) + }) + // Start the cron scheduler + c.Start() + utils.Info("Creating a new cron instance...done") + defer c.Stop() + select {} } -func intro() { - utils.Info("Starting MySQL Backup...") - utils.Info("Copyright © 2024 Jonas Kaninda ") +func BackupTask(db *dbConfig, config *BackupConfig) { + utils.Info("Starting backup task...") + 
switch config.storage { + case "s3": + s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + case "local": + localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + case "ssh", "remote": + sshBackup(db, config.backupFileName, config.remotePath, config.disableCompression, config.prune, config.backupRetention, config.encryption) + case "ftp": + utils.Fatal("Not supported storage type: %s", config.storage) + default: + localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + } } // BackupDatabase backup database diff --git a/pkg/config.go b/pkg/config.go index af0f10f..5c109ab 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -7,9 +7,11 @@ package pkg import ( + "fmt" "github.com/jkaninda/mysql-bkup/utils" "github.com/spf13/cobra" "os" + "time" ) type Config struct { @@ -30,7 +32,27 @@ type targetDbConfig struct { targetDbName string } -func getDbConfig(cmd *cobra.Command) *dbConfig { +type BackupConfig struct { + backupFileName string + backupRetention int + disableCompression bool + prune bool + encryption bool + remotePath string + gpqPassphrase string + storage string + cronExpression string +} +type RestoreConfig struct { + s3Path string + remotePath string + storage string + file string + bucket string + gpqPassphrase string +} + +func initDbConfig(cmd *cobra.Command) *dbConfig { //Set env utils.GetEnv(cmd, "dbname", "DB_NAME") dConf := dbConfig{} @@ -47,7 +69,66 @@ func getDbConfig(cmd *cobra.Command) *dbConfig { } return &dConf } -func getTargetDbConfig() *targetDbConfig { +func initBackupConfig(cmd *cobra.Command) *BackupConfig { + utils.SetEnv("STORAGE_PATH", storagePath) + utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") + + //Get flag value and set env + remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") + storage = utils.GetEnv(cmd, 
"storage", "STORAGE") + backupRetention, _ := cmd.Flags().GetInt("keep-last") + prune, _ := cmd.Flags().GetBool("prune") + disableCompression, _ = cmd.Flags().GetBool("disable-compression") + _, _ = cmd.Flags().GetString("mode") + gpqPassphrase := os.Getenv("GPG_PASSPHRASE") + _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") + cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION") + + if gpqPassphrase != "" { + encryption = true + } + //Generate file name + backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20240102_150405")) + if disableCompression { + backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20240102_150405")) + } + + //Initialize backup configs + config := BackupConfig{} + config.backupFileName = backupFileName + config.backupRetention = backupRetention + config.disableCompression = disableCompression + config.prune = prune + config.storage = storage + config.encryption = encryption + config.remotePath = remotePath + config.gpqPassphrase = gpqPassphrase + config.cronExpression = cronExpression + return &config +} +func initRestoreConfig(cmd *cobra.Command) *RestoreConfig { + utils.SetEnv("STORAGE_PATH", storagePath) + + //Get flag value and set env + s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") + remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") + storage = utils.GetEnv(cmd, "storage", "STORAGE") + file = utils.GetEnv(cmd, "file", "FILE_NAME") + _, _ = cmd.Flags().GetString("mode") + bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") + gpqPassphrase := os.Getenv("GPG_PASSPHRASE") + //Initialize restore configs + rConfig := RestoreConfig{} + rConfig.s3Path = s3Path + rConfig.remotePath = remotePath + rConfig.storage = storage + rConfig.bucket = bucket + rConfig.file = file + rConfig.storage = storage + rConfig.gpqPassphrase = gpqPassphrase + return &rConfig +} +func initTargetDbConfig() *targetDbConfig { tdbConfig := targetDbConfig{} tdbConfig.targetDbHost = 
os.Getenv("TARGET_DB_HOST") tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT") diff --git a/pkg/helper.go b/pkg/helper.go index 425b439..dcf3ecc 100644 --- a/pkg/helper.go +++ b/pkg/helper.go @@ -125,3 +125,7 @@ func testDatabaseConnection(db *dbConfig) { utils.Info("Successfully connected to %s database", db.dbName) } +func intro() { + utils.Info("Starting MySQL Backup...") + utils.Info("Copyright © 2024 Jonas Kaninda ") +} diff --git a/pkg/migrate.go b/pkg/migrate.go index 27e705f..7f52148 100644 --- a/pkg/migrate.go +++ b/pkg/migrate.go @@ -17,8 +17,8 @@ func StartMigration(cmd *cobra.Command) { intro() utils.Info("Starting database migration...") //Get DB config - dbConf = getDbConfig(cmd) - targetDbConf = getTargetDbConfig() + dbConf = initDbConfig(cmd) + targetDbConf = initTargetDbConfig() //Defining the target database variables newDbConfig := dbConfig{} diff --git a/pkg/restore.go b/pkg/restore.go index 16bd321..2a07529 100644 --- a/pkg/restore.go +++ b/pkg/restore.go @@ -17,33 +17,24 @@ import ( func StartRestore(cmd *cobra.Command) { intro() - //Set env - utils.SetEnv("STORAGE_PATH", storagePath) - - //Get flag value and set env - s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") - remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") - storage = utils.GetEnv(cmd, "storage", "STORAGE") - file = utils.GetEnv(cmd, "file", "FILE_NAME") - executionMode, _ = cmd.Flags().GetString("mode") - bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") - dbConf = getDbConfig(cmd) + dbConf = initDbConfig(cmd) + restoreConf := initRestoreConfig(cmd) switch storage { case "s3": - restoreFromS3(dbConf, file, bucket, s3Path) + restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path) case "local": utils.Info("Restore database from local") - copyToTmp(storagePath, file) - RestoreDatabase(dbConf, file) + copyToTmp(storagePath, restoreConf.file) + RestoreDatabase(dbConf, restoreConf.file) case "ssh": - restoreFromRemote(dbConf, file, 
remotePath) + restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath) case "ftp": utils.Fatal("Restore from FTP is not yet supported") default: utils.Info("Restore database from local") - copyToTmp(storagePath, file) - RestoreDatabase(dbConf, file) + copyToTmp(storagePath, restoreConf.file) + RestoreDatabase(dbConf, restoreConf.file) } } diff --git a/pkg/var.go b/pkg/var.go index 837c8ec..fb9df35 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -16,7 +16,6 @@ const gpgExtension = "gpg" var ( storage = "local" file = "" - executionMode = "default" storagePath = "/backup" disableCompression = false encryption = false From b39e97b77d8f7eec768618d521e1bb5bfd3f1f86 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 08:01:33 +0200 Subject: [PATCH 3/7] refactor: clean up project, delete unused files, variables --- docker/Dockerfile | 6 ++-- pkg/backup.go | 12 ++++---- pkg/scripts.go | 71 ----------------------------------------------- utils/constant.go | 8 +++--- 4 files changed, 13 insertions(+), 84 deletions(-) delete mode 100644 pkg/scripts.go diff --git a/docker/Dockerfile b/docker/Dockerfile index b87fc10..389b3d2 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -36,7 +36,7 @@ ENV TARGET_DB_NAME="localhost" ENV TARGET_DB_USERNAME="" ENV TARGET_DB_PASSWORD="" ARG DEBIAN_FRONTEND=noninteractive -ENV VERSION="v1.2.7" +ENV VERSION="v1.2.8" ENV BACKUP_CRON_EXPRESSION="" ENV TG_TOKEN="" ENV TG_CHAT_ID="" @@ -48,7 +48,7 @@ ARG BACKUP_CRON_SCRIPT="/usr/local/bin/backup_cron.sh" LABEL author="Jonas Kaninda" RUN apt-get update -qq -RUN apt install mysql-client supervisor cron gnupg -y +RUN apt install mysql-client cron gnupg -y # Clear cache RUN apt-get clean && rm -rf /var/lib/apt/lists/* @@ -69,8 +69,6 @@ RUN chmod +x /usr/local/bin/mysql-bkup RUN ln -s /usr/local/bin/mysql-bkup /usr/local/bin/bkup -ADD docker/supervisord.conf /etc/supervisor/supervisord.conf - # Create backup script and make it executable RUN echo 
'#!/bin/sh\n/usr/local/bin/mysql-bkup backup "$@"' > /usr/local/bin/backup && \ chmod +x /usr/local/bin/backup diff --git a/pkg/backup.go b/pkg/backup.go index f7bb3c0..c95a780 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -38,23 +38,25 @@ func StartBackup(cmd *cobra.Command) { // Run in scheduled mode func scheduledMode(db *dbConfig, config *BackupConfig) { utils.Info("Running in Scheduled mode") - utils.Info("Backup cron expression: %s", os.Getenv("BACKUP_CRON_EXPRESSION")) + utils.Info("Backup cron expression: %s", config.cronExpression) utils.Info("Storage type %s ", storage) //Test database connexion testDatabaseConnection(db) - utils.Info("Creating a new cron instance...") + utils.Info("Creating cron instance...") // Create a new cron instance c := cron.New() - // Add a cron job that runs every 10 seconds - c.AddFunc(config.cronExpression, func() { + _, err := c.AddFunc(config.cronExpression, func() { BackupTask(db, config) }) + if err != nil { + return + } // Start the cron scheduler c.Start() - utils.Info("Creating a new cron instance...done") + utils.Info("Creating cron instance...done") defer c.Stop() select {} } diff --git a/pkg/scripts.go b/pkg/scripts.go deleted file mode 100644 index cac17a8..0000000 --- a/pkg/scripts.go +++ /dev/null @@ -1,71 +0,0 @@ -// Package pkg / -/***** -@author Jonas Kaninda -@license MIT License -@Copyright © 2024 Jonas Kaninda -**/ -package pkg - -import ( - "fmt" - "github.com/jkaninda/mysql-bkup/utils" - "os" - "os/exec" -) - -func CreateCrontabScript(disableCompression bool, storage string) { - //task := "/usr/local/bin/backup_cron.sh" - touchCmd := exec.Command("touch", backupCronFile) - if err := touchCmd.Run(); err != nil { - utils.Fatal("Error creating file %s: %v\n", backupCronFile, err) - } - var disableC = "" - if disableCompression { - disableC = "--disable-compression" - } - - scriptContent := fmt.Sprintf(`#!/usr/bin/env bash -set -e -/usr/local/bin/mysql-bkup backup --dbname %s --storage %s %v -`, 
os.Getenv("DB_NAME"), storage, disableC) - - if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil { - utils.Fatal("Error writing to %s: %v\n", backupCronFile, err) - } - - chmodCmd := exec.Command("chmod", "+x", "/usr/local/bin/backup_cron.sh") - if err := chmodCmd.Run(); err != nil { - utils.Fatal("Error changing permissions of %s: %v\n", backupCronFile, err) - } - - lnCmd := exec.Command("ln", "-s", "/usr/local/bin/backup_cron.sh", "/usr/local/bin/backup_cron") - if err := lnCmd.Run(); err != nil { - utils.Fatal("Error creating symbolic link: %v\n", err) - - } - - touchLogCmd := exec.Command("touch", cronLogFile) - if err := touchLogCmd.Run(); err != nil { - utils.Fatal("Error creating file %s: %v\n", cronLogFile, err) - } - - cronJob := "/etc/cron.d/backup_cron" - touchCronCmd := exec.Command("touch", cronJob) - if err := touchCronCmd.Run(); err != nil { - utils.Fatal("Error creating file %s: %v\n", cronJob, err) - } - - cronContent := fmt.Sprintf(`%s root exec /bin/bash -c ". 
/run/supervisord.env; /usr/local/bin/backup_cron.sh >> %s" -`, os.Getenv("BACKUP_CRON_EXPRESSION"), cronLogFile) - - if err := utils.WriteToFile(cronJob, cronContent); err != nil { - utils.Fatal("Error writing to %s: %v\n", cronJob, err) - } - utils.ChangePermission("/etc/cron.d/backup_cron", 0644) - - crontabCmd := exec.Command("crontab", "/etc/cron.d/backup_cron") - if err := crontabCmd.Run(); err != nil { - utils.Fatal("Error updating crontab: ", err) - } - utils.Info("Backup job created.") -} diff --git a/utils/constant.go b/utils/constant.go index 84421e5..9a7183f 100644 --- a/utils/constant.go +++ b/utils/constant.go @@ -7,10 +7,10 @@ package utils const RestoreExample = "mysql-bkup restore --dbname database --file db_20231219_022941.sql.gz\n" + - "bkup restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz" + "restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz" const BackupExample = "mysql-bkup backup --dbname database --disable-compression\n" + - "mysql-bkup backup --dbname database --storage s3 --path /custom-path --disable-compression" + "backup --dbname database --storage s3 --path /custom-path --disable-compression" const MainExample = "mysql-bkup backup --dbname database --disable-compression\n" + - "mysql-bkup backup --dbname database --storage s3 --path /custom-path\n" + - "mysql-bkup restore --dbname database --file db_20231219_022941.sql.gz" + "backup --dbname database --storage s3 --path /custom-path\n" + + "restore --dbname database --file db_20231219_022941.sql.gz" From 050f5e81bc96cf1aff09a20ba1041ad11295b09c Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 08:30:53 +0200 Subject: [PATCH 4/7] docs: update scheduled mode deployment --- docs/how-tos/backup-to-s3.md | 5 +++-- docs/how-tos/backup-to-ssh.md | 5 +++-- docs/how-tos/backup.md | 5 +++-- docs/reference/index.md | 25 +++++++++++++++++++++---- 4 files changed, 30 insertions(+), 10 deletions(-) 
diff --git a/docs/how-tos/backup-to-s3.md b/docs/how-tos/backup-to-s3.md index 9277a48..cd62461 100644 --- a/docs/how-tos/backup-to-s3.md +++ b/docs/how-tos/backup-to-s3.md @@ -48,7 +48,7 @@ networks: ### Recurring backups to S3 As explained above, you need just to add AWS environment variables and specify the storage type `--storage s3`. -In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. +In case you need to use recurring backups, you can use `--cron-expression "0 1 * * *"` flag or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below. ```yml services: @@ -59,7 +59,7 @@ services: # for a list of available releases. image: jkaninda/mysql-bkup container_name: mysql-bkup - command: backup --storage s3 -d my-database --mode scheduled --period "0 1 * * *" + command: backup --storage s3 -d my-database --cron-expression "0 1 * * *" environment: - DB_PORT=3306 - DB_HOST=mysql @@ -72,6 +72,7 @@ services: - AWS_REGION="us-west-2" - AWS_ACCESS_KEY=xxxx - AWS_SECRET_KEY=xxxxx + # - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional ## In case you are using S3 alternative such as Minio and your Minio instance is not secured, you change it to true - AWS_DISABLE_SSL="false" # mysql-bkup container must be connected to the same network with your database diff --git a/docs/how-tos/backup-to-ssh.md b/docs/how-tos/backup-to-ssh.md index 4f413f1..484b47e 100644 --- a/docs/how-tos/backup-to-ssh.md +++ b/docs/how-tos/backup-to-ssh.md @@ -52,7 +52,7 @@ networks: ### Recurring backups to SSH remote server As explained above, you need just to add required environment variables and specify the storage type `--storage ssh`. -You can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. +You can use `--cron-expression "* * * * *"` or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below. 
```yml services: @@ -63,7 +63,7 @@ services: # for a list of available releases. image: jkaninda/mysql-bkup container_name: mysql-bkup - command: backup -d database --storage ssh --mode scheduled --period "0 1 * * *" + command: backup -d database --storage ssh --cron-expression "0 1 * * *" volumes: - ./id_ed25519:/tmp/id_ed25519" environment: @@ -78,6 +78,7 @@ services: - SSH_USER=user - SSH_REMOTE_PATH=/home/jkaninda/backups - SSH_IDENTIFY_FILE=/tmp/id_ed25519 + # - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional ## We advise you to use a private jey instead of password #- SSH_PASSWORD=password # mysql-bkup container must be connected to the same network with your database diff --git a/docs/how-tos/backup.md b/docs/how-tos/backup.md index 1c7a7bb..eca46c0 100644 --- a/docs/how-tos/backup.md +++ b/docs/how-tos/backup.md @@ -54,7 +54,7 @@ networks: jkaninda/mysql-bkup backup -d database_name ``` -In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. +In case you need to use recurring backups, you can use `--cron-expression "0 1 * * *"` flag or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below. ```yml services: @@ -65,7 +65,7 @@ services: # for a list of available releases. 
image: jkaninda/mysql-bkup container_name: mysql-bkup - command: backup -d database --mode scheduled --period "0 1 * * *" + command: backup -d database --cron-expression "0 1 * * *" volumes: - ./backup:/backup environment: @@ -74,6 +74,7 @@ services: - DB_NAME=database - DB_USERNAME=username - DB_PASSWORD=password + - BACKUP_CRON_EXPRESSION=0 1 * * * # mysql-bkup container must be connected to the same network with your database networks: - web diff --git a/docs/reference/index.md b/docs/reference/index.md index efa683e..02e80a5 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -25,11 +25,10 @@ Backup, restore and migrate targets, schedule and retention are configured using | --path | | AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup` | | --dbname | -d | Database name | | --port | -p | Database port (default: 3306) | -| --mode | -m | Execution mode. default or scheduled (default: default) | | --disable-compression | | Disable database backup compression | | --prune | | Delete old backup, default disabled | | --keep-last | | Delete old backup created more than specified days ago, default 7 days | -| --period | | Crontab period for scheduled mode only. (default: "0 1 * * *") | +| --cron-expression | | Backup cron expression, eg: (* * * * *) or @daily | | --help | -h | Print this help message and exit | | --version | -V | Print version information and exit | @@ -68,7 +67,7 @@ Backup, restore and migrate targets, schedule and retention are configured using ## Run in Scheduled mode This image can be run as CronJob in Kubernetes for a regular backup which makes deployment on Kubernetes easy as Kubernetes has CronJob resources. -For Docker, you need to run it in scheduled mode by adding `--mode scheduled` flag and specify the periodical backup time by adding `--period "0 1 * * *"` flag. 
+For Docker, you need to run it in scheduled mode by adding `--cron-expression "* * * * *"` flag or by defining `BACKUP_CRON_EXPRESSION=0 1 * * *` environment variable. ## Syntax of crontab (field description) @@ -110,4 +109,22 @@ Easy to remember format: ```conf 0 1 * * * -``` \ No newline at end of file +``` +## Predefined schedules +You may use one of several pre-defined schedules in place of a cron expression. + +| Entry | Description | Equivalent To | +|------------------------|--------------------------------------------|---------------| +| @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 1 1 * | +| @monthly | Run once a month, midnight, first of month | 0 0 1 * * | +| @weekly | Run once a week, midnight between Sat/Sun | 0 0 * * 0 | +| @daily (or @midnight) | Run once a day, midnight | 0 0 * * * | +| @hourly | Run once an hour, beginning of hour | 0 * * * * | + +### Intervals +You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this: + +@every +where "duration" is a string accepted by time. + +For example, "@every 1h30m10s" would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that. 
\ No newline at end of file From 7912ce46edb4bcc9638482e5c29feadb034e6ec4 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 08:32:04 +0200 Subject: [PATCH 5/7] chore: add cron-expression to get value from flag --- pkg/config.go | 1 + 1 file changed, 1 insertion(+) diff --git a/pkg/config.go b/pkg/config.go index 5c109ab..1592705 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -71,6 +71,7 @@ func initDbConfig(cmd *cobra.Command) *dbConfig { } func initBackupConfig(cmd *cobra.Command) *BackupConfig { utils.SetEnv("STORAGE_PATH", storagePath) + utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION") utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") //Get flag value and set env From e97fc7512ab1c3c833a4c0e17e62a3628ad28dbc Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 09:18:58 +0200 Subject: [PATCH 6/7] fix: generate backup file name in scheduled mode --- pkg/backup.go | 9 ++++++++- pkg/config.go | 9 --------- pkg/restore.go | 2 +- utils/utils.go | 2 +- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/pkg/backup.go b/pkg/backup.go index c95a780..6729754 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -15,6 +15,7 @@ import ( "os" "os/exec" "path/filepath" + "time" ) func StartBackup(cmd *cobra.Command) { @@ -39,7 +40,7 @@ func StartBackup(cmd *cobra.Command) { func scheduledMode(db *dbConfig, config *BackupConfig) { utils.Info("Running in Scheduled mode") utils.Info("Backup cron expression: %s", config.cronExpression) - utils.Info("Storage type %s ", storage) + utils.Info("Storage type %s ", config.storage) //Test database connexion testDatabaseConnection(db) @@ -62,6 +63,12 @@ func scheduledMode(db *dbConfig, config *BackupConfig) { } func BackupTask(db *dbConfig, config *BackupConfig) { utils.Info("Starting backup task...") + //Generate backup file name + backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20240102_150405")) + if config.disableCompression { + backupFileName = 
fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405")) + } + config.backupFileName = backupFileName switch config.storage { case "s3": s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) diff --git a/pkg/config.go b/pkg/config.go index 1592705..6c1f6b2 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -7,11 +7,9 @@ package pkg import ( - "fmt" "github.com/jkaninda/mysql-bkup/utils" "github.com/spf13/cobra" "os" - "time" ) type Config struct { @@ -88,15 +86,8 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { if gpqPassphrase != "" { encryption = true } - //Generate file name - backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20240102_150405")) - if disableCompression { - backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20240102_150405")) - } - //Initialize backup configs config := BackupConfig{} - config.backupFileName = backupFileName config.backupRetention = backupRetention config.disableCompression = disableCompression config.prune = prune diff --git a/pkg/restore.go b/pkg/restore.go index 2a07529..6c0d72a 100644 --- a/pkg/restore.go +++ b/pkg/restore.go @@ -20,7 +20,7 @@ func StartRestore(cmd *cobra.Command) { dbConf = initDbConfig(cmd) restoreConf := initRestoreConfig(cmd) - switch storage { + switch restoreConf.storage { case "s3": restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path) case "local": diff --git a/utils/utils.go b/utils/utils.go index caf07ed..f347e82 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -240,7 +240,7 @@ func NotifyError(error string) { //Telegram notification err := CheckEnvVars(vars) if err == nil { - message := "[🔴MySQL Backup ]\n" + + message := "[🔴 MySQL Backup ]\n" + "An error occurred during database backup \n" + "Error: " + error sendMessage(message) From 4b2527f4163c30956b4a0e1be93f43f2fc059510 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: 
Sat, 28 Sep 2024 09:43:51 +0200 Subject: [PATCH 7/7] chore: define gpg home directory --- docs/how-tos/encrypt-backup.md | 5 ++- docs/reference/index.md | 58 +++++++++++++++++----------------- pkg/encrypt.go | 4 +-- pkg/var.go | 2 +- 4 files changed, 36 insertions(+), 33 deletions(-) diff --git a/docs/how-tos/encrypt-backup.md b/docs/how-tos/encrypt-backup.md index d136fb5..dded0f7 100644 --- a/docs/how-tos/encrypt-backup.md +++ b/docs/how-tos/encrypt-backup.md @@ -9,8 +9,11 @@ nav_order: 7 The image supports encrypting backups using GPG out of the box. In case a `GPG_PASSPHRASE` environment variable is set, the backup archive will be encrypted using the given key and saved as a sql.gpg file instead or sql.gz.gpg. {: .warning } -To restore an encrypted backup, you need to provide the same GPG passphrase used during backup process. +To restore an encrypted backup, you need to provide the same GPG passphrase or key used during backup process. +- GPG home directory `/config/gnupg` +- Cipher algorithm `aes256` +- To decrypt manually, you need to install `gnupg` ### Decrypt backup diff --git a/docs/reference/index.md b/docs/reference/index.md index 02e80a5..9ad5e39 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -34,35 +34,35 @@ Backup, restore and migrate targets, schedule and retention are configured using ## Environment variables -| Name | Requirement | Description | -|------------------------|----------------------------------------------------|------------------------------------------------------| -| DB_PORT | Optional, default 3306 | Database port number | -| DB_HOST | Required | Database host | -| DB_NAME | Optional if it was provided from the -d flag | Database name | -| DB_USERNAME | Required | Database user name | -| DB_PASSWORD | Required | Database password | -| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key | -| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key | -| AWS_BUCKET_NAME | 
Optional, required for S3 storage | AWS S3 Bucket Name | -| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | -| AWS_REGION | Optional, required for S3 storage | AWS Region | -| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL | -| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) | -| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --period flag | Backup cron expression for docker in scheduled mode | -| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase | -| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip | -| SSH_USER | Optional, required for SSH storage | ssh remote user | -| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password | -| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key | -| SSH_PORT | Optional, required for SSH storage | ssh remote server port | -| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) | -| TARGET_DB_HOST | Optional, required for database migration | Target database host | -| TARGET_DB_PORT | Optional, required for database migration | Target database port | -| TARGET_DB_NAME | Optional, required for database migration | Target database name | -| TARGET_DB_USERNAME | Optional, required for database migration | Target database username | -| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password | -| TG_TOKEN | Optional, required for Telegram notification | Telegram token | -| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID | +| Name | Requirement | Description | +|------------------------|--------------------------------------------------------------|------------------------------------------------------| +| DB_PORT | Optional, default 3306 | Database port number | +| DB_HOST | Required | 
Database host | +| DB_NAME | Optional if it was provided from the -d flag | Database name | +| DB_USERNAME | Required | Database user name | +| DB_PASSWORD | Required | Database password | +| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key | +| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key | +| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | +| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | +| AWS_REGION | Optional, required for S3 storage | AWS Region | +| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL | +| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) | +| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --cron-expression flag | Backup cron expression for docker in scheduled mode | +| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase | +| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip | +| SSH_USER | Optional, required for SSH storage | ssh remote user | +| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password | +| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key | +| SSH_PORT | Optional, required for SSH storage | ssh remote server port | +| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) | +| TARGET_DB_HOST | Optional, required for database migration | Target database host | +| TARGET_DB_PORT | Optional, required for database migration | Target database port | +| TARGET_DB_NAME | Optional, required for database migration | Target database name | +| TARGET_DB_USERNAME | Optional, required for database migration | Target database username | +| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password | +| TG_TOKEN | Optional, required for Telegram notification | 
Telegram token | +| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID | --- ## Run in Scheduled mode diff --git a/pkg/encrypt.go b/pkg/encrypt.go index d096321..ddd85b7 100644 --- a/pkg/encrypt.go +++ b/pkg/encrypt.go @@ -16,7 +16,7 @@ import ( func Decrypt(inputFile string, passphrase string) error { utils.Info("Decrypting backup file: " + inputFile + " ...") //Create gpg home dir - err := utils.MakeDir(gpgHome) + err := utils.MakeDirAll(gpgHome) if err != nil { return err } @@ -37,7 +37,7 @@ func Decrypt(inputFile string, passphrase string) error { func Encrypt(inputFile string, passphrase string) error { utils.Info("Encrypting backup...") //Create gpg home dir - err := utils.MakeDir(gpgHome) + err := utils.MakeDirAll(gpgHome) if err != nil { return err } diff --git a/pkg/var.go b/pkg/var.go index fb9df35..f72226c 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -10,7 +10,7 @@ const cronLogFile = "/var/log/mysql-bkup.log" const tmpPath = "/tmp/backup" const backupCronFile = "/usr/local/bin/backup_cron.sh" const algorithm = "aes256" -const gpgHome = "gnupg" +const gpgHome = "/config/gnupg" const gpgExtension = "gpg" var (