chore: migrate backup scheduled mode from linux cron to go cron
@@ -30,7 +30,8 @@ func init() {
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled")
BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time")
BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated")
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days")
BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression")
@@ -36,7 +36,7 @@ ENV TARGET_DB_NAME=""
ENV TARGET_DB_USERNAME=""
ENV TARGET_DB_PASSWORD=""
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v1.2.5"
ENV VERSION="v1.2.8"
ENV BACKUP_CRON_EXPRESSION=""
ENV TG_TOKEN=""
ENV TG_CHAT_ID=""
@@ -50,7 +50,7 @@ LABEL author="Jonas Kaninda"
RUN apt-get update -qq

RUN apt install postgresql-client supervisor cron gnupg -y
RUN apt install postgresql-client cron gnupg -y

# Clear cache
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
@@ -71,7 +71,6 @@ RUN chmod +x /usr/local/bin/pg-bkup
RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup

ADD docker/supervisord.conf /etc/supervisor/supervisord.conf
# Create the backup script and make it executable
RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup backup "$@"' > /usr/local/bin/backup && \
chmod +x /usr/local/bin/backup
go.mod
@@ -12,9 +12,9 @@ require (
github.com/bramvdbogaerde/go-scp v1.5.0 // indirect
github.com/hpcloud/tail v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
golang.org/x/crypto v0.18.0 // indirect
golang.org/x/sys v0.22.0 // indirect
gopkg.in/fsnotify.v1 v1.4.7 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
)
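The new dependency is github.com/robfig/cron/v3, the in-process scheduler that replaces linux cron. A small sanity check of what the old default schedule "0 1 * * *" means, assuming nothing beyond the library's ParseStandard and Schedule.Next API:

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	// "0 1 * * *" was the old default for --period: every day at 01:00.
	schedule, err := cron.ParseStandard("0 1 * * *")
	if err != nil {
		panic(err)
	}
	now := time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC)
	fmt.Println(schedule.Next(now)) // 2024-02-01 01:00:00 +0000 UTC
}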
go.sum
@@ -29,6 +29,8 @@ github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEp
github.com/minio/minio-go/v7 v7.0.74 h1:fTo/XlPBTSpo3BAMshlwKL5RspXRv9us5UeHEGYCFe0=
github.com/minio/minio-go/v7 v7.0.74/go.mod h1:qydcVzV8Hqtj1VtEocfxbmVFa2siu6HGa+LDEPogjD8=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
pkg/backup.go
@@ -8,8 +8,8 @@ package pkg
import (
"fmt"
"github.com/hpcloud/tail"
"github.com/jkaninda/pg-bkup/utils"
"github.com/robfig/cron/v3"
"github.com/spf13/cobra"
"log"
"os"
@@ -20,104 +20,67 @@ import (
func StartBackup(cmd *cobra.Command) {
intro()
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
dbConf = initDbConfig(cmd)
//Initialize backup configs
config := initBackupConfig(cmd)

//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
executionMode, _ = cmd.Flags().GetString("mode")
gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")

dbConf = getDbConfig(cmd)

//
if gpgPassphrase != "" {
encryption = true
}

//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
if disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
}

if executionMode == "default" {
switch storage {
case "s3":
s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "local":
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "ssh", "remote":
sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", storage)
default:
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
}

} else if executionMode == "scheduled" {
scheduledMode(dbConf, storage)
if config.cronExpression == "" {
BackupTask(dbConf, config)
} else {
utils.Fatal("Error, unknown execution mode!")
if utils.IsValidCronExpression(config.cronExpression) {
scheduledMode(dbConf, config)
} else {
utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
}
}

}

// Run in scheduled mode
func scheduledMode(db *dbConfig, storage string) {

fmt.Println()
fmt.Println("**********************************")
fmt.Println(" Starting PostgreSQL Bkup... ")
fmt.Println("***********************************")
func scheduledMode(db *dbConfig, config *BackupConfig) {
utils.Info("Running in Scheduled mode")
utils.Info("Execution period %s ", os.Getenv("BACKUP_CRON_EXPRESSION"))
utils.Info("Storage type %s ", storage)
utils.Info("Backup cron expression: %s", config.cronExpression)
utils.Info("Storage type %s ", config.storage)

//Test database connection
testDatabaseConnection(db)

utils.Info("Creating backup job...")
CreateCrontabScript(disableCompression, storage)
utils.Info("Creating cron job...")
// Create a new cron instance
c := cron.New()

supervisorConfig := "/etc/supervisor/supervisord.conf"

// Start Supervisor
cmd := exec.Command("supervisord", "-c", supervisorConfig)
err := cmd.Start()
_, err := c.AddFunc(config.cronExpression, func() {
BackupTask(db, config)
})
if err != nil {
utils.Fatal("Failed to start supervisord: %v", err)
return
}
utils.Info("Backup job started")

defer func() {
if err := cmd.Process.Kill(); err != nil {
utils.Info("Failed to kill supervisord process: %v", err)
} else {
utils.Info("Supervisor stopped.")
// Start the cron scheduler
c.Start()
utils.Info("Creating cron job...done")
defer c.Stop()
select {}
}
}()

if _, err := os.Stat(cronLogFile); os.IsNotExist(err) {
utils.Fatal(fmt.Sprintf("Log file %s does not exist.", cronLogFile))
func BackupTask(db *dbConfig, config *BackupConfig) {
utils.Info("Starting backup task...")
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20060102_150405"))
if config.disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405"))
}
t, err := tail.TailFile(cronLogFile, tail.Config{Follow: true})
if err != nil {
utils.Fatal("Failed to tail file: %v", err)
config.backupFileName = backupFileName
switch config.storage {
case "s3":
s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "local":
localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "ssh", "remote":
sshBackup(db, config.backupFileName, config.remotePath, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", config.storage)
default:
localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
}

// Read and print new lines from the log file
for line := range t.Lines {
fmt.Println(line.Text)
}

}
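For context on the file-name timestamps above: Go's time.Format uses the fixed reference time Mon Jan 2 15:04:05 MST 2006 as its layout, so the layout string has to be "20060102_150405"; other digit sequences are not recognized as placeholders. A standalone illustration (the database name "mydb" is just a placeholder):

package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Date(2024, 1, 31, 1, 0, 0, 0, time.UTC)
	// The layout "20060102_150405" encodes year, month, day, hour, minute, second.
	fmt.Printf("%s_%s.sql.gz\n", "mydb", now.Format("20060102_150405"))
	// Output: mydb_20240131_010000.sql.gz
}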
func intro() {
utils.Info("Starting PostgreSQL Backup...")
@@ -33,8 +33,19 @@ type TgConfig struct {
Token string
ChatId string
}
type BackupConfig struct {
backupFileName string
backupRetention int
disableCompression bool
prune bool
encryption bool
remotePath string
gpqPassphrase string
storage string
cronExpression string
}

func getDbConfig(cmd *cobra.Command) *dbConfig {
func initDbConfig(cmd *cobra.Command) *dbConfig {
//Set env
utils.GetEnv(cmd, "dbname", "DB_NAME")
dConf := dbConfig{}
@@ -51,7 +62,71 @@ func getDbConfig(cmd *cobra.Command) *dbConfig {
}
return &dConf
}
func getTargetDbConfig() *targetDbConfig {
func initBackupConfig(cmd *cobra.Command) *BackupConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")

//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
executionMode, _ = cmd.Flags().GetString("mode")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")

if gpqPassphrase != "" {
encryption = true
}

//Initialize backup configs
config := BackupConfig{}
config.backupRetention = backupRetention
config.disableCompression = disableCompression
config.prune = prune
config.storage = storage
config.encryption = encryption
config.remotePath = remotePath
config.gpqPassphrase = gpqPassphrase
config.cronExpression = cronExpression
return &config
}

type RestoreConfig struct {
s3Path string
remotePath string
storage string
file string
bucket string
gpqPassphrase string
}

func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
utils.SetEnv("STORAGE_PATH", storagePath)

//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
_, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
//Initialize restore configs
rConfig := RestoreConfig{}
rConfig.s3Path = s3Path
rConfig.remotePath = remotePath
rConfig.storage = storage
rConfig.bucket = bucket
rConfig.file = file
rConfig.storage = storage
rConfig.gpqPassphrase = gpqPassphrase
return &rConfig
}
func initTargetDbConfig() *targetDbConfig {
tdbConfig := targetDbConfig{}
tdbConfig.targetDbHost = os.Getenv("TARGET_DB_HOST")
tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT")
@@ -17,8 +17,8 @@ func StartMigration(cmd *cobra.Command) {
intro()
utils.Info("Starting database migration...")
//Get DB config
dbConf = getDbConfig(cmd)
targetDbConf = getTargetDbConfig()
dbConf = initDbConfig(cmd)
targetDbConf = initTargetDbConfig()

//Defining the target database variables
newDbConfig := dbConfig{}
@@ -17,34 +17,24 @@ import (
func StartRestore(cmd *cobra.Command) {
intro()
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
dbConf = initDbConfig(cmd)
restoreConf := initRestoreConfig(cmd)

//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
executionMode, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")

dbConf = getDbConfig(cmd)

switch storage {
switch restoreConf.storage {
case "s3":
restoreFromS3(dbConf, file, bucket, s3Path)
restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path)
case "local":
utils.Info("Restore database from local")
copyToTmp(storagePath, file)
RestoreDatabase(dbConf, file)
copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, restoreConf.file)
case "ssh":
restoreFromRemote(dbConf, file, remotePath)
restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath)
case "ftp":
utils.Fatal("Restore from FTP is not yet supported")
default:
utils.Info("Restore database from local")
copyToTmp(storagePath, file)
RestoreDatabase(dbConf, file)
copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, restoreConf.file)
}
}
@@ -10,6 +10,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"github.com/robfig/cron/v3"
"github.com/spf13/cobra"
"io"
"io/fs"
@@ -248,3 +249,7 @@ func getTgUrl() string {
return fmt.Sprintf("https://api.telegram.org/bot%s", os.Getenv("TG_TOKEN"))

}
func IsValidCronExpression(cronExpr string) bool {
_, err := cron.ParseStandard(cronExpr)
return err == nil
}
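IsValidCronExpression leans on cron.ParseStandard, which accepts the classic 5-field crontab format plus descriptors like @daily. A quick standalone check of the behaviour this implies (the expressions are chosen for illustration):

package main

import (
	"fmt"

	"github.com/robfig/cron/v3"
)

func main() {
	for _, expr := range []string{"0 1 * * *", "@daily", "61 * * * *"} {
		_, err := cron.ParseStandard(expr)
		fmt.Printf("%q valid=%v\n", expr, err == nil) // the last one fails: minute out of range
	}
}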