From 05a195e1ba59cf0a3ed45c3c597cbc63a4eabbb8 Mon Sep 17 00:00:00 2001
From: Jonas Kaninda
Date: Tue, 30 Jul 2024 07:02:18 +0200
Subject: [PATCH] Refactoring of code

---
 Makefile          | 10 +++++-----
 docker/Dockerfile | 27 +++++++++++++++------------
 pkg/backup.go     | 10 +++++-----
 pkg/encrypt.go    |  2 +-
 pkg/helper.go     |  2 +-
 pkg/scripts.go    |  2 +-
 utils/s3.go       | 12 +++++++-----
 utils/utils.go    | 44 ++++++++++++++++++++++++++++++++++++++++++--
 8 files changed, 77 insertions(+), 32 deletions(-)

diff --git a/Makefile b/Makefile
index 06ec18d..9720f2c 100644
--- a/Makefile
+++ b/Makefile
@@ -19,20 +19,20 @@ docker-build:
 
 docker-run: docker-build
 	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --prune --keep-last 2
 
 docker-restore: docker-build
-	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup restore -f uzaraka_20240729_200543.sql.gz.gpg
+	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup restore -f ${FILE_NAME}
 
 docker-run-scheduled: docker-build
-	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" jkaninda/pg-bkup bkup backup --mode scheduled --period "* * * * *"
+	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --mode scheduled --period "* * * * *"
 
 docker-run-scheduled-s3: docker-build
-	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" jkaninda/pg-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
+	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
 
 docker-run-s3: docker-build
-	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --storage s3 --path /custom-path
+	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --storage s3 --path /custom-path
 
 docker-restore-s3: docker-build
-	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup restore --storage s3 --path /custom-path -f uzaraka_20240729_205710.sql.gz.gpg
+	docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup restore --storage s3 --path /custom-path -f $FILE_NAME
"GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --storage s3 --path /custom-path + docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup backup --storage s3 --path /custom-path docker-restore-s3: docker-build - docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup restore --storage s3 --path /custom-path -f uzaraka_20240729_205710.sql.gz.gpg + docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/pg-bkup bkup restore --storage s3 --path /custom-path -f $FILE_NAME diff --git a/docker/Dockerfile b/docker/Dockerfile index 356d7bf..16b4074 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -19,17 +19,21 @@ ENV STORAGE=local ENV BUCKET_NAME="" ENV ACCESS_KEY="" ENV SECRET_KEY="" -ENV REGION="" +ENV AWS_REGION="us-west-2" +ENV AWS_DISABLE_SSL="false" +ENV GPG_PASSPHRASE="" ENV SSH_USER="" ENV SSH_PASSWORD="" ENV SSH_HOST_NAME="" ENV SSH_IDENTIFY_FILE="/root/.ssh/id_rsa" -ENV GPG_PASS_PHRASE="" ENV SSH_PORT="22" ENV S3_ENDPOINT=https://s3.amazonaws.com ARG DEBIAN_FRONTEND=noninteractive -ENV VERSION="v0.6" -LABEL authors="Jonas Kaninda" +ENV VERSION="v0.8" +ARG WORKDIR="/app" +ARG BACKUPDIR="/backup" +ARG BACKUP_TMP_DIR="/tmp/backup" +LABEL author="Jonas Kaninda" RUN apt-get update -qq @@ -38,10 +42,12 @@ RUN apt install postgresql-client postgresql-client-common supervisor cron opens # Clear cache RUN apt-get clean && rm -rf /var/lib/apt/lists/* -RUN mkdir /s3mnt -RUN mkdir /tmp/s3cache -RUN chmod 777 /s3mnt -RUN chmod 777 /tmp/s3cache +RUN mkdir $WORKDIR +RUN mkdir $BACKUPDIR +RUN mkdir -p $BACKUP_TMP_DIR +RUN chmod 777 $WORKDIR +RUN chmod 777 $BACKUPDIR +RUN chmod 777 $BACKUP_TMP_DIR COPY --from=build /app/pg-bkup /usr/local/bin/pg-bkup RUN chmod +x /usr/local/bin/pg-bkup @@ -50,7 +56,4 @@ RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup ADD docker/supervisord.conf /etc/supervisor/supervisord.conf - -RUN mkdir /backup -RUN mkdir /tmp/backup -WORKDIR /root +WORKDIR $WORKDIR diff --git a/pkg/backup.go b/pkg/backup.go index c4b72fb..33024d4 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -48,17 +48,14 @@ func StartBackup(cmd *cobra.Command) { if executionMode == "default" { switch storage { case "s3": - utils.Info("Backup database to s3 storage") s3Backup(backupFileName, s3Path, disableCompression, prune, backupRetention, encryption) case "local": - utils.Info("Backup database to local storage") localBackup(backupFileName, disableCompression, prune, backupRetention, encryption) case "ssh": fmt.Println("x is 2") case "ftp": fmt.Println("x is 3") default: - 
utils.Info("Backup database to local storage") localBackup(backupFileName, disableCompression, prune, backupRetention, encryption) } @@ -94,7 +91,7 @@ func scheduledMode() { if err != nil { utils.Fatal("Failed to start supervisord: %v", err) } - utils.Info("Starting backup job...") + utils.Info("Backup job started") defer func() { if err := cmd.Process.Kill(); err != nil { utils.Info("Failed to kill supervisord process: %v", err) @@ -203,6 +200,7 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac encryptBackup(backupFileName) finalFileName = fmt.Sprintf("%s.%s", backupFileName, gpgExtension) } + utils.Info("Backup name is ", finalFileName) moveToBackup(finalFileName, storagePath) //Delete old backup if prune { @@ -213,6 +211,7 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac func s3Backup(backupFileName string, s3Path string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { bucket := os.Getenv("BUCKET_NAME") storagePath = os.Getenv("STORAGE_PATH") + utils.Info("Backup database to s3 storage") //Backup database BackupDatabase(backupFileName, disableCompression) finalFileName := backupFileName @@ -220,7 +219,8 @@ func s3Backup(backupFileName string, s3Path string, disableCompression bool, pru encryptBackup(backupFileName) finalFileName = fmt.Sprintf("%s.%s", backupFileName, "gpg") } - utils.Info("Uploading file to S3 storage") + utils.Info("Uploading backup file to S3 storage...") + utils.Info("Backup name is ", backupFileName) err := utils.UploadFileToS3(tmpPath, finalFileName, bucket, s3Path) if err != nil { utils.Fatalf("Error uploading file to S3: %s ", err) diff --git a/pkg/encrypt.go b/pkg/encrypt.go index ecd43b5..ea74108 100644 --- a/pkg/encrypt.go +++ b/pkg/encrypt.go @@ -9,7 +9,7 @@ import ( ) func Decrypt(inputFile string, passphrase string) error { - utils.Info("Decrypting backup...") + utils.Info("Decrypting backup file: " + inputFile + " ...") cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--output", RemoveLastExtension(inputFile), "--decrypt", inputFile) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr diff --git a/pkg/helper.go b/pkg/helper.go index 95b648e..a6cabaa 100644 --- a/pkg/helper.go +++ b/pkg/helper.go @@ -29,7 +29,7 @@ func moveToBackup(backupFileName string, destinationPath string) { fmt.Println("Error deleting file:", err) } - utils.Done("Database has been backed up and copied to destination ") + utils.Done("Database has been backed up and copied to ", filepath.Join(destinationPath, backupFileName)) } func deleteOldBackup(retentionDays int) { utils.Info("Deleting old backups...") diff --git a/pkg/scripts.go b/pkg/scripts.go index 0cc2d38..b056804 100644 --- a/pkg/scripts.go +++ b/pkg/scripts.go @@ -74,5 +74,5 @@ bkup backup --dbname %s --port %s %v if err := crontabCmd.Run(); err != nil { utils.Fatal("Error updating crontab: ", err) } - utils.Info("Starting backup in scheduled mode") + utils.Info("Backup job created.") } diff --git a/utils/s3.go b/utils/s3.go index fbc4667..2a3cb1a 100644 --- a/utils/s3.go +++ b/utils/s3.go @@ -12,25 +12,27 @@ import ( "net/http" "os" "path/filepath" + "strconv" "time" ) // CreateSession creates a new AWS session func CreateSession() (*session.Session, error) { - //key := aws.String("testobject") endPoint := os.Getenv("S3_ENDPOINT") - //bucket := os.Getenv("BUCKET_NAME") - region := os.Getenv("REGION") accessKey := os.Getenv("ACCESS_KEY") secretKey := os.Getenv("SECRET_KEY") - + region := os.Getenv("AWS_REGION") + 
+	awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
+	if err != nil {
+		Fatalf("Unable to parse AWS_DISABLE_SSL env var: %s", err)
+	}
 
 	// Configure to use MinIO Server
 	s3Config := &aws.Config{
 		Credentials:      credentials.NewStaticCredentials(accessKey, secretKey, ""),
 		Endpoint:         aws.String(endPoint),
 		Region:           aws.String(region),
-		DisableSSL:       aws.Bool(false),
+		DisableSSL:       aws.Bool(awsDisableSsl),
 		S3ForcePathStyle: aws.Bool(true),
 	}
 	return session.NewSession(s3Config)
diff --git a/utils/utils.go b/utils/utils.go
index c2aa642..6424efc 100644
--- a/utils/utils.go
+++ b/utils/utils.go
@@ -7,11 +7,13 @@ package utils
  * @link https://github.com/jkaninda/mysql-bkup
  **/
 import (
+	"bytes"
 	"fmt"
 	"github.com/spf13/cobra"
 	"io"
 	"io/fs"
 	"os"
+	"os/exec"
 )
 
 func Info(v ...any) {
@@ -105,8 +107,46 @@ func IsDirEmpty(name string) (bool, error) {
 
 // TestDatabaseConnection tests the database connection
 func TestDatabaseConnection() {
-	Info("Testing database connection...")
-	// Test database connection
+	dbHost := os.Getenv("DB_HOST")
+	dbPassword := os.Getenv("DB_PASSWORD")
+	dbUserName := os.Getenv("DB_USERNAME")
+	dbName := os.Getenv("DB_NAME")
+	dbPort := os.Getenv("DB_PORT")
+
+	if os.Getenv("DB_HOST") == "" || os.Getenv("DB_NAME") == "" || os.Getenv("DB_USERNAME") == "" || os.Getenv("DB_PASSWORD") == "" {
+		Fatal("Please make sure all required database environment variables are set")
+	} else {
+		Info("Connecting to database ...")
+		// Test database connection
+		query := "SELECT version();"
+
+		// Set the environment variable for the database password
+		err := os.Setenv("PGPASSWORD", dbPassword)
+		if err != nil {
+			return
+		}
+		// Prepare the psql command
+		cmd := exec.Command("psql",
+			"-U", dbUserName, // database user
+			"-d", dbName, // database name
+			"-h", dbHost, // host
+			"-p", dbPort, // port
+			"-c", query, // SQL command to execute
+		)
+		// Capture the output
+		var out bytes.Buffer
+		cmd.Stdout = &out
+		cmd.Stderr = &out
+
+		// Run the command and capture any errors
+		err = cmd.Run()
+		if err != nil {
+			fmt.Printf("Error running psql command: %v\nOutput: %s\n", err, out.String())
+			return
+		}
+		Info("Successfully connected to database")
+
+	}
 }
 func GetEnv(cmd *cobra.Command, flagName, envName string) string {
 	value, _ := cmd.Flags().GetString(flagName)
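
Note on the new AWS_DISABLE_SSL handling in utils/s3.go: strconv.ParseBool rejects the
empty string, so CreateSession now exits via Fatalf whenever AWS_DISABLE_SSL is unset.
The Dockerfile default of "false" covers the image, but not a binary run outside it.
A minimal sketch of a more tolerant lookup, assuming a hypothetical getBoolEnv helper
that is not part of this patch:

	package utils

	import (
		"os"
		"strconv"
	)

	// getBoolEnv is a hypothetical helper (not part of this patch): it reads a
	// boolean environment variable and falls back to a default when the variable
	// is unset, instead of treating the empty string as a parse error.
	func getBoolEnv(name string, fallback bool) (bool, error) {
		raw := os.Getenv(name)
		if raw == "" {
			return fallback, nil // unset: keep the default rather than failing
		}
		return strconv.ParseBool(raw) // accepts "1", "t", "true", "0", "f", "false", ...
	}

With such a helper, CreateSession could call getBoolEnv("AWS_DISABLE_SSL", false) and
fail only on genuinely malformed values.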