From 9380a18b45f75b6a92d4ed0a601f0a514e77c8f9 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Wed, 2 Oct 2024 04:07:14 +0200 Subject: [PATCH] refactor: remove old arguments, refactor aws and ssh configuration --- .github/workflows/build.yml | 4 +- .github/workflows/release.yml | 10 +-- cmd/backup.go | 2 - docker/Dockerfile | 9 ++- pkg/config.go | 123 ++++++++++++++++++++++++++-------- pkg/s3.go | 56 ++++------------ pkg/scp.go | 100 +++++++++++++-------------- pkg/var.go | 9 +++ 8 files changed, 178 insertions(+), 135 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9fb40d7..fdb150e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -27,6 +27,8 @@ jobs: push: true file: "./docker/Dockerfile" platforms: linux/amd64,linux/arm64,linux/arm/v7 + build-args: | + appVersion=develop-${{ github.sha }} tags: | - "${{env.BUILDKIT_IMAGE}}:develop-${{ github.sha }}" + "${{vars.BUILDKIT_IMAGE}}:develop-${{ github.sha }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8a19c2c..7ecf515 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -41,9 +41,11 @@ jobs: push: true file: "./docker/Dockerfile" platforms: linux/amd64,linux/arm64,linux/arm/v7 + build-args: | + appVersion=${{ env.TAG_NAME }} tags: | - "${{env.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}" - "${{env.BUILDKIT_IMAGE}}:latest" - "ghcr.io/${{env.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}" - "ghcr.io/${{env.BUILDKIT_IMAGE}}:latest" + "${{vars.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}" + "${{vars.BUILDKIT_IMAGE}}:latest" + "ghcr.io/${{vars.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}" + "ghcr.io/${{vars.BUILDKIT_IMAGE}}:latest" diff --git a/cmd/backup.go b/cmd/backup.go index e1647b7..0bdb118 100644 --- a/cmd/backup.go +++ b/cmd/backup.go @@ -29,8 +29,6 @@ func init() { //Backup BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3") BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") - BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. 
| Deprecated") - BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated") BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression") BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled") BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days") diff --git a/docker/Dockerfile b/docker/Dockerfile index 31cbb6a..8a245e9 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -23,14 +23,15 @@ ENV AWS_SECRET_KEY="" ENV AWS_S3_PATH="" ENV AWS_REGION="us-west-2" ENV AWS_DISABLE_SSL="false" +ENV AWS_FORCE_PATH_STYLE="true" ENV GPG_PASSPHRASE="" ENV SSH_USER="" ENV SSH_PASSWORD="" -ENV SSH_HOST_NAME="" +ENV SSH_HOST="" ENV SSH_IDENTIFY_FILE="" ENV SSH_PORT=22 ENV REMOTE_PATH="" -ENV FTP_HOST_NAME="" +ENV FTP_HOST="" ENV FTP_PORT=21 ENV FTP_USER="" ENV FTP_PASSWORD="" @@ -39,7 +40,6 @@ ENV TARGET_DB_PORT=3306 ENV TARGET_DB_NAME="" ENV TARGET_DB_USERNAME="" ENV TARGET_DB_PASSWORD="" -ENV VERSION="v1.2.12" ENV BACKUP_CRON_EXPRESSION="" ENV TG_TOKEN="" ENV TG_CHAT_ID="" @@ -47,7 +47,10 @@ ENV TZ=UTC ARG WORKDIR="/config" ARG BACKUPDIR="/backup" ARG BACKUP_TMP_DIR="/tmp/backup" +ARG appVersion="v1.2.12" +ENV VERSION=${appVersion} LABEL author="Jonas Kaninda" +LABEL version=${appVersion} RUN apk --update add --no-cache mysql-client mariadb-connector-c gnupg tzdata RUN mkdir $WORKDIR diff --git a/pkg/config.go b/pkg/config.go index cc38b95..d9db7c4 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -7,9 +7,11 @@ package pkg import ( + "fmt" "github.com/jkaninda/mysql-bkup/utils" "github.com/spf13/cobra" "os" + "strconv" ) type Config struct { @@ -29,7 +31,10 @@ type targetDbConfig struct { targetDbPassword string targetDbName string } - +type TgConfig struct { + Token string + ChatId string +} type BackupConfig struct { backupFileName string backupRetention int @@ -41,14 +46,6 @@ type BackupConfig struct { storage string cronExpression string } -type RestoreConfig struct { - s3Path string - remotePath string - storage string - file string - bucket string - gpqPassphrase string -} type FTPConfig struct { host string user string @@ -57,6 +54,24 @@ type FTPConfig struct { remotePath string } +// SSHConfig holds the SSH connection details +type SSHConfig struct { + user string + password string + hostName string + port string + identifyFile string +} +type AWSConfig struct { + endpoint string + bucket string + accessKey string + secretKey string + region string + disableSsl bool + forcePathStyle bool +} + func initDbConfig(cmd *cobra.Command) *dbConfig { //Set env utils.GetEnv(cmd, "dbname", "DB_NAME") @@ -74,14 +89,71 @@ func initDbConfig(cmd *cobra.Command) *dbConfig { } return &dConf } + +// loadSSHConfig loads the SSH configuration from environment variables +func loadSSHConfig() (*SSHConfig, error) { + utils.GetEnvVariable("SSH_HOST", "SSH_HOST_NAME") + sshVars := []string{"SSH_USER", "SSH_HOST", "SSH_PORT", "REMOTE_PATH"} + err := utils.CheckEnvVars(sshVars) + if err != nil { + return nil, fmt.Errorf("error missing environment variables: %w", err) + } + + return &SSHConfig{ + user: os.Getenv("SSH_USER"), + password: os.Getenv("SSH_PASSWORD"), + hostName: os.Getenv("SSH_HOST"), + port: os.Getenv("SSH_PORT"), + identifyFile: os.Getenv("SSH_IDENTIFY_FILE"), + }, nil +} +func initFtpConfig() *FTPConfig { + //Initialize data configs + fConfig := FTPConfig{} + fConfig.host = utils.GetEnvVariable("FTP_HOST", "FTP_HOST_NAME") + fConfig.user = 
os.Getenv("FTP_USER") + fConfig.password = os.Getenv("FTP_PASSWORD") + fConfig.port = os.Getenv("FTP_PORT") + fConfig.remotePath = os.Getenv("REMOTE_PATH") + err := utils.CheckEnvVars(ftpVars) + if err != nil { + utils.Error("Please make sure all required environment variables for FTP are set") + utils.Fatal("Error missing environment variables: %s", err) + } + return &fConfig +} +func initAWSConfig() *AWSConfig { + //Initialize AWS configs + aConfig := AWSConfig{} + aConfig.endpoint = utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT") + aConfig.accessKey = utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY") + aConfig.secretKey = utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY") + aConfig.bucket = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") + aConfig.region = os.Getenv("AWS_REGION") + disableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL")) + if err != nil { + utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err) + } + forcePathStyle, err := strconv.ParseBool(os.Getenv("AWS_FORCE_PATH_STYLE")) + if err != nil { + utils.Fatal("Unable to parse AWS_FORCE_PATH_STYLE env var: %s", err) + } + aConfig.disableSsl = disableSsl + aConfig.forcePathStyle = forcePathStyle + err = utils.CheckEnvVars(awsVars) + if err != nil { + utils.Error("Please make sure all required environment variables for AWS S3 are set") + utils.Fatal("Error checking environment variables: %s", err) + } + return &aConfig +} func initBackupConfig(cmd *cobra.Command) *BackupConfig { utils.SetEnv("STORAGE_PATH", storagePath) utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION") utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") utils.GetEnv(cmd, "path", "REMOTE_PATH") - remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") - //Get flag value and set env + remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") storage = utils.GetEnv(cmd, "storage", "STORAGE") backupRetention, _ := cmd.Flags().GetInt("keep-last") prune, _ := cmd.Flags().GetBool("prune") @@ -94,6 +166,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { if passphrase != "" { encryption = true } + //Initialize backup configs config := BackupConfig{} config.backupRetention = backupRetention @@ -106,16 +179,25 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { config.cronExpression = cronExpression return &config } + +type RestoreConfig struct { + s3Path string + remotePath string + storage string + file string + bucket string + gpqPassphrase string +} + func initRestoreConfig(cmd *cobra.Command) *RestoreConfig { utils.SetEnv("STORAGE_PATH", storagePath) utils.GetEnv(cmd, "path", "REMOTE_PATH") - remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") //Get flag value and set env s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") + remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") storage = utils.GetEnv(cmd, "storage", "STORAGE") file = utils.GetEnv(cmd, "file", "FILE_NAME") - _, _ = cmd.Flags().GetString("mode") bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") gpqPassphrase := os.Getenv("GPG_PASSPHRASE") //Initialize restore configs @@ -144,18 +226,3 @@ func initTargetDbConfig() *targetDbConfig { } return &tdbConfig } -func initFtpConfig() *FTPConfig { - //Initialize backup configs - fConfig := FTPConfig{} - fConfig.host = os.Getenv("FTP_HOST_NAME") - fConfig.user = os.Getenv("FTP_USER") - fConfig.password = os.Getenv("FTP_PASSWORD") - fConfig.port = os.Getenv("FTP_PORT") - fConfig.remotePath = os.Getenv("REMOTE_PATH") 
- err := utils.CheckEnvVars(ftpVars) - if err != nil { - utils.Error("Please make sure all required environment variables for FTP are set") - utils.Fatal("Error checking environment variables: %s", err) - } - return &fConfig -} diff --git a/pkg/s3.go b/pkg/s3.go index c5e9392..00612dd 100644 --- a/pkg/s3.go +++ b/pkg/s3.go @@ -1,4 +1,4 @@ -// Package utils / +// Package pkg /***** @author Jonas Kaninda @license MIT License @@ -8,56 +8,28 @@ package pkg import ( "bytes" - "fmt" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3manager" "github.com/jkaninda/mysql-bkup/utils" - "log" "net/http" "os" "path/filepath" - "strconv" "time" ) // CreateSession creates a new AWS session func CreateSession() (*session.Session, error) { - // AwsVars Required environment variables for AWS S3 storage - var awsVars = []string{ - "AWS_S3_ENDPOINT", - "AWS_S3_BUCKET_NAME", - "AWS_ACCESS_KEY", - "AWS_SECRET_KEY", - "AWS_REGION", - "AWS_REGION", - "AWS_REGION", - } - - endPoint := utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT") - accessKey := utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY") - secretKey := utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY") - _ = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") - - region := os.Getenv("AWS_REGION") - awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL")) - if err != nil { - utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err) - } - - err = utils.CheckEnvVars(awsVars) - if err != nil { - utils.Fatal("Error checking environment variables\n: %s", err) - } - // S3 Config + awsConfig := initAWSConfig() + // Configure to use MinIO Server s3Config := &aws.Config{ - Credentials: credentials.NewStaticCredentials(accessKey, secretKey, ""), - Endpoint: aws.String(endPoint), - Region: aws.String(region), - DisableSSL: aws.Bool(awsDisableSsl), - S3ForcePathStyle: aws.Bool(true), + Credentials: credentials.NewStaticCredentials(awsConfig.accessKey, awsConfig.secretKey, ""), + Endpoint: aws.String(awsConfig.endpoint), + Region: aws.String(awsConfig.region), + DisableSSL: aws.Bool(awsConfig.disableSsl), + S3ForcePathStyle: aws.Bool(awsConfig.forcePathStyle), } return session.NewSession(s3Config) @@ -109,10 +81,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error { if err != nil { return err } - utils.Info("Download backup from S3 storage...") + utils.Info("Download data from S3 storage...") file, err := os.Create(filepath.Join(destinationPath, key)) if err != nil { - fmt.Println("Failed to create file", err) + utils.Error("Failed to create file", err) return err } defer file.Close() @@ -159,18 +131,18 @@ func DeleteOldBackup(bucket, prefix string, retention int) error { Key: object.Key, }) if err != nil { - log.Printf("Failed to delete object %s: %v", *object.Key, err) + utils.Info("Failed to delete object %s: %v", *object.Key, err) } else { - fmt.Printf("Deleted object %s\n", *object.Key) + utils.Info("Deleted object %s\n", *object.Key) } } } return !lastPage }) if err != nil { - log.Fatalf("Failed to list objects: %v", err) + utils.Error("Failed to list objects: %v", err) } - fmt.Println("Finished deleting old files.") + utils.Info("Finished deleting old files.") return nil } diff --git a/pkg/scp.go b/pkg/scp.go index 3833082..6df6af7 100644 --- a/pkg/scp.go +++ b/pkg/scp.go @@ -18,83 +18,73 @@ import ( "path/filepath" ) -func CopyToRemote(fileName, remotePath 
string) error { - sshUser := os.Getenv("SSH_USER") - sshPassword := os.Getenv("SSH_PASSWORD") - sshHostName := os.Getenv("SSH_HOST_NAME") - sshPort := os.Getenv("SSH_PORT") - sshIdentifyFile := os.Getenv("SSH_IDENTIFY_FILE") - - err := utils.CheckEnvVars(sshHVars) - if err != nil { - utils.Error("Error checking environment variables: %s", err) - os.Exit(1) - } - - clientConfig, _ := auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey()) - if sshIdentifyFile != "" && utils.FileExists(sshIdentifyFile) { - clientConfig, _ = auth.PrivateKey(sshUser, sshIdentifyFile, ssh.InsecureIgnoreHostKey()) - +// createSSHClientConfig sets up the SSH client configuration based on the provided SSHConfig +func createSSHClientConfig(sshConfig *SSHConfig) (ssh.ClientConfig, error) { + if sshConfig.identifyFile != "" && utils.FileExists(sshConfig.identifyFile) { + return auth.PrivateKey(sshConfig.user, sshConfig.identifyFile, ssh.InsecureIgnoreHostKey()) } else { - if sshPassword == "" { - return errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty") + if sshConfig.password == "" { + return ssh.ClientConfig{}, errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty") } - utils.Warn("Accessing the remote server using password, password is not recommended") - clientConfig, _ = auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey()) - + utils.Warn("Accessing the remote server using password, which is not recommended.") + return auth.PasswordKey(sshConfig.user, sshConfig.password, ssh.InsecureIgnoreHostKey()) } +} + +// CopyToRemote copies a file to a remote server via SCP +func CopyToRemote(fileName, remotePath string) error { + // Load environment variables + sshConfig, err := loadSSHConfig() + if err != nil { + return fmt.Errorf("failed to load SSH configuration: %w", err) + } + + // Initialize SSH client config + clientConfig, err := createSSHClientConfig(sshConfig) + if err != nil { + return fmt.Errorf("failed to create SSH client config: %w", err) + } + // Create a new SCP client - client := scp.NewClient(fmt.Sprintf("%s:%s", sshHostName, sshPort), &clientConfig) + client := scp.NewClient(fmt.Sprintf("%s:%s", sshConfig.hostName, sshConfig.port), &clientConfig) // Connect to the remote server err = client.Connect() if err != nil { - return errors.New("Couldn't establish a connection to the remote server") + return errors.New("Couldn't establish a connection to the remote server\n") } - // Open a file - file, _ := os.Open(filepath.Join(tmpPath, fileName)) - - // Close client connection after the file has been copied + // Open the local file + filePath := filepath.Join(tmpPath, fileName) + file, err := os.Open(filePath) + if err != nil { + return fmt.Errorf("failed to open file %s: %w", filePath, err) + } defer client.Close() - // Close the file after it has been copied - defer file.Close() - // the context can be adjusted to provide time-outs or inherit from other contexts if this is embedded in a larger application. 
+ // Copy file to the remote server err = client.CopyFromFile(context.Background(), *file, filepath.Join(remotePath, fileName), "0655") if err != nil { - fmt.Println("Error while copying file ") - return err + return fmt.Errorf("failed to copy file to remote server: %w", err) } + return nil } func CopyFromRemote(fileName, remotePath string) error { - sshUser := os.Getenv("SSH_USER") - sshPassword := os.Getenv("SSH_PASSWORD") - sshHostName := os.Getenv("SSH_HOST_NAME") - sshPort := os.Getenv("SSH_PORT") - sshIdentifyFile := os.Getenv("SSH_IDENTIFY_FILE") - - err := utils.CheckEnvVars(sshHVars) + // Load environment variables + sshConfig, err := loadSSHConfig() if err != nil { - utils.Error("Error checking environment variables\n: %s", err) - os.Exit(1) + return fmt.Errorf("failed to load SSH configuration: %w", err) } - clientConfig, _ := auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey()) - if sshIdentifyFile != "" && utils.FileExists(sshIdentifyFile) { - clientConfig, _ = auth.PrivateKey(sshUser, sshIdentifyFile, ssh.InsecureIgnoreHostKey()) - - } else { - if sshPassword == "" { - return errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty\n") - } - utils.Warn("Accessing the remote server using password, password is not recommended") - clientConfig, _ = auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey()) - + // Initialize SSH client config + clientConfig, err := createSSHClientConfig(sshConfig) + if err != nil { + return fmt.Errorf("failed to create SSH client config: %w", err) } + // Create a new SCP client - client := scp.NewClient(fmt.Sprintf("%s:%s", sshHostName, sshPort), &clientConfig) + client := scp.NewClient(fmt.Sprintf("%s:%s", sshConfig.hostName, sshConfig.port), &clientConfig) // Connect to the remote server err = client.Connect() @@ -113,7 +103,7 @@ func CopyFromRemote(fileName, remotePath string) error { err = client.CopyFromRemote(context.Background(), file, filepath.Join(remotePath, fileName)) if err != nil { - fmt.Println("Error while copying file ", err) + utils.Error("Error while copying file %s ", err) return err } return nil diff --git a/pkg/var.go b/pkg/var.go index a273e97..17c37eb 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -51,3 +51,12 @@ var ftpVars = []string{ "FTP_PASSWORD", "FTP_PORT", } + +// AwsVars Required environment variables for AWS S3 storage +var awsVars = []string{ + "AWS_S3_ENDPOINT", + "AWS_S3_BUCKET_NAME", + "AWS_ACCESS_KEY", + "AWS_SECRET_KEY", + "AWS_REGION", +}
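
A minimal sketch, assuming the import path github.com/jkaninda/mysql-bkup/pkg and hypothetical host, bucket, and credential values, of how a caller might exercise the refactored helpers after this change; CreateSession and CopyToRemote are the exported entry points touched by the refactor, and the renamed variables (SSH_HOST, AWS_FORCE_PATH_STYLE) are the ones introduced above:

package main

import (
	"log"
	"os"

	"github.com/jkaninda/mysql-bkup/pkg"
)

func main() {
	// Hypothetical values, for illustration only.
	// SSH_HOST replaces SSH_HOST_NAME; loadSSHConfig still consults the old
	// name through utils.GetEnvVariable.
	os.Setenv("SSH_USER", "backup")
	os.Setenv("SSH_HOST", "192.0.2.10")
	os.Setenv("SSH_PORT", "22")
	os.Setenv("SSH_IDENTIFY_FILE", "/config/id_ed25519")
	os.Setenv("REMOTE_PATH", "/home/backup/dumps")

	// Variables required by initAWSConfig; AWS_DISABLE_SSL and the new
	// AWS_FORCE_PATH_STYLE must both parse as booleans.
	os.Setenv("AWS_S3_ENDPOINT", "http://minio:9000")
	os.Setenv("AWS_S3_BUCKET_NAME", "backups")
	os.Setenv("AWS_ACCESS_KEY", "minio")
	os.Setenv("AWS_SECRET_KEY", "minio123")
	os.Setenv("AWS_REGION", "us-west-2")
	os.Setenv("AWS_DISABLE_SSL", "true")
	os.Setenv("AWS_FORCE_PATH_STYLE", "true")

	// CreateSession now builds the aws.Config from initAWSConfig instead of
	// reading and validating each variable inline.
	if _, err := pkg.CreateSession(); err != nil {
		log.Fatalf("failed to create S3 session: %v", err)
	}

	// CopyToRemote loads and validates the SSH settings itself; the named
	// backup file is expected to exist in the tool's temporary backup directory.
	if err := pkg.CopyToRemote("db_20241002.sql.gz", os.Getenv("REMOTE_PATH")); err != nil {
		log.Fatalf("scp upload failed: %v", err)
	}
}

Centralising the parsing in loadSSHConfig, initAWSConfig and initFtpConfig means CopyToRemote, CopyFromRemote and CreateSession no longer duplicate the environment-variable checks they each carried before this patch.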