diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..9fb40d7
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,32 @@
+name: Build
+on:
+  push:
+    branches: ['develop']
+env:
+  BUILDKIT_IMAGE: jkaninda/mysql-bkup
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      -
+        name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+      -
+        name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      -
+        name: Login to DockerHub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      -
+        name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          push: true
+          file: "./docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64,linux/arm/v7
+          tags: |
+            "${{env.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"
+
diff --git a/Makefile b/Makefile
index 834a9c9..4538630 100644
--- a/Makefile
+++ b/Makefile
@@ -27,7 +27,7 @@ docker-run-scheduled: docker-build
 
 docker-run-scheduled-s3: docker-build
-	docker run --rm --network web --user 1000:1000 --name mysql-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
+	docker run --rm --network web --name mysql-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
 
 docker-run-s3: docker-build
 	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "AWS_S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage s3 --path /custom-path
@@ -37,7 +37,7 @@ docker-restore-s3: docker-build
 	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup restore --storage s3 -f ${FILE_NAME} --path /custom-path
 
 docker-run-ssh: docker-build
-	docker run --rm --network web -v "${SSH_IDENTIFY_FILE_LOCAL}:" --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage ssh
+	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage ssh
 
 docker-restore-ssh: docker-build
 	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" jkaninda/mysql-bkup bkup restore --storage ssh -f ${FILE_NAME}
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 309e6f3..c34fa43 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -21,6 +21,7 @@ ENV AWS_S3_BUCKET_NAME=""
 ENV AWS_ACCESS_KEY=""
 ENV AWS_SECRET_KEY=""
 ENV AWS_REGION="us-west-2"
+ENV AWS_S3_PATH=""
 ENV AWS_DISABLE_SSL="false"
 ENV GPG_PASSPHRASE=""
 ENV SSH_USER=""
diff --git a/docs/how-tos/backup-to-ssh.md b/docs/how-tos/backup-to-ssh.md
index 1581ee4..f8d4f80 100644
--- a/docs/how-tos/backup-to-ssh.md
+++ b/docs/how-tos/backup-to-ssh.md
@@ -32,7 +32,7 @@ services:
     environment:
       - DB_PORT=3306
       - DB_HOST=mysql
-      - DB_NAME=database
+      #- DB_NAME=database
      - DB_USERNAME=username
       - DB_PASSWORD=password
       ## SSH config
@@ -69,7 +69,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - mysql-bkup backup -d database --storage s3 --mode scheduled --period "0 1 * * *"
+      - mysql-bkup backup -d database --storage ssh --mode scheduled --period "0 1 * * *"
     volumes:
       - ./id_ed25519:/tmp/id_ed25519"
     environment:
@@ -117,7 +117,7 @@ spec:
             command:
               - /bin/sh
               - -c
-              - mysql-bkup backup -s s3 --path /custom_path
+              - mysql-bkup backup -s ssh
             env:
               - name: DB_PORT
                 value: "3306"
@@ -141,6 +141,6 @@ spec:
               - name: AWS_ACCESS_KEY
                 value: "xxxx"
               - name: SSH_IDENTIFY_FILE
-                value: "/home/jkaninda/backups"
-          restartPolicy: OnFailure
+                value: "/tmp/id_ed25519"
+          restartPolicy: Never
 ```
\ No newline at end of file
diff --git a/docs/how-tos/restore-from-s3.md b/docs/how-tos/restore-from-s3.md
index 4c10f14..ed63819 100644
--- a/docs/how-tos/restore-from-s3.md
+++ b/docs/how-tos/restore-from-s3.md
@@ -48,4 +48,54 @@ services:
       - web
 networks:
   web:
+```
+
+## Restore on Kubernetes
+
+
+### Simple Kubernetes CronJob usage:
+
+```yaml
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: bkup-job
+spec:
+  schedule: "0 1 * * *"
+  jobTemplate:
+    spec:
+      template:
+        spec:
+          containers:
+            - name: mysql-bkup
+              image: jkaninda/mysql-bkup
+              command:
+                - /bin/sh
+                - -c
+                - mysql-bkup restore -s s3 --path /custom_path -f store_20231219_022941.sql.gz
+              env:
+                - name: DB_PORT
+                  value: "3306"
+                - name: DB_HOST
+                  value: ""
+                - name: DB_NAME
+                  value: ""
+                - name: DB_USERNAME
+                  value: ""
+                # Please use secret!
+                - name: DB_PASSWORD
+                  value: ""
+                - name: AWS_S3_ENDPOINT
+                  value: "https://s3.amazonaws.com"
+                - name: AWS_S3_BUCKET_NAME
+                  value: "xxx"
+                - name: AWS_REGION
+                  value: "us-west-2"
+                - name: AWS_ACCESS_KEY
+                  value: "xxxx"
+                - name: AWS_SECRET_KEY
+                  value: "xxxx"
+                - name: AWS_DISABLE_SSL
+                  value: "false"
+          restartPolicy: OnFailure
+```
\ No newline at end of file
diff --git a/docs/how-tos/restore-from-ssh.md b/docs/how-tos/restore-from-ssh.md
index f2b7a25..9c4dd10 100644
--- a/docs/how-tos/restore-from-ssh.md
+++ b/docs/how-tos/restore-from-ssh.md
@@ -47,4 +47,52 @@ services:
       - web
 networks:
   web:
+```
+## Restore on Kubernetes
+
+Simple Kubernetes CronJob usage:
+
+```yaml
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: bkup-job
+spec:
+  schedule: "0 1 * * *"
+  jobTemplate:
+    spec:
+      template:
+        spec:
+          containers:
+            - name: mysql-bkup
+              image: jkaninda/mysql-bkup
+              command:
+                - /bin/sh
+                - -c
+                - mysql-bkup restore -s ssh -f store_20231219_022941.sql.gz
+              env:
+                - name: DB_PORT
+                  value: "3306"
+                - name: DB_HOST
+                  value: ""
+                - name: DB_NAME
+                  value: ""
+                - name: DB_USERNAME
+                  value: ""
+                # Please use secret!
+                - name: DB_PASSWORD
+                  value: ""
+                - name: SSH_HOST_NAME
+                  value: ""
+                - name: SSH_PORT
+                  value: "22"
+                - name: SSH_USER
+                  value: "xxx"
+                - name: SSH_REMOTE_PATH
+                  value: "/home/jkaninda/backups"
+                - name: AWS_ACCESS_KEY
+                  value: "xxxx"
+                - name: SSH_IDENTIFY_FILE
+                  value: "/tmp/id_ed25519"
+          restartPolicy: Never
+```
\ No newline at end of file
diff --git a/pkg/backup.go b/pkg/backup.go
index 6abff9e..627320e 100644
--- a/pkg/backup.go
+++ b/pkg/backup.go
@@ -25,7 +25,6 @@ func StartBackup(cmd *cobra.Command) {
 	utils.GetEnv(cmd, "period", "SCHEDULE_PERIOD")
 
 	//Get flag value and set env
-	s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
 	remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
 	storage = utils.GetEnv(cmd, "storage", "STORAGE")
 	file = utils.GetEnv(cmd, "file", "FILE_NAME")
@@ -35,6 +34,8 @@ func StartBackup(cmd *cobra.Command) {
 	executionMode, _ = cmd.Flags().GetString("mode")
 	dbName = os.Getenv("DB_NAME")
 	gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
+	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
+
 	//
 	if gpqPassphrase != "" {
 		encryption = true
@@ -49,7 +50,7 @@ func StartBackup(cmd *cobra.Command) {
 	if executionMode == "default" {
 		switch storage {
 		case "s3":
-			s3Backup(backupFileName, s3Path, disableCompression, prune, backupRetention, encryption)
+			s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
 		case "local":
 			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
 		case "ssh", "remote":
@@ -61,7 +62,7 @@ func StartBackup(cmd *cobra.Command) {
 		}
 
 	} else if executionMode == "scheduled" {
-		scheduledMode()
+		scheduledMode(storage)
 	} else {
 		utils.Fatal("Error, unknown execution mode!")
 	}
@@ -69,7 +70,7 @@ func StartBackup(cmd *cobra.Command) {
 }
 
 // Run in scheduled mode
-func scheduledMode() {
+func scheduledMode(storage string) {
 
 	fmt.Println()
 	fmt.Println("**********************************")
@@ -77,6 +78,7 @@ func scheduledMode() {
 	fmt.Println("***********************************")
 	utils.Info("Running in Scheduled mode")
 	utils.Info("Execution period %s", os.Getenv("SCHEDULE_PERIOD"))
+	utils.Info("Storage type %s ", storage)
 
 	//Test database connexion
 	utils.TestDatabaseConnection()
@@ -203,8 +205,9 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac
 	}
 }
 
-func s3Backup(backupFileName string, s3Path string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
+func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
 	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
+	s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
 	utils.Info("Backup database to s3 storage")
 	//Backup database
 	BackupDatabase(backupFileName, disableCompression)
diff --git a/pkg/scripts.go b/pkg/scripts.go
index 27bf05f..e05ad55 100644
--- a/pkg/scripts.go
+++ b/pkg/scripts.go
@@ -22,19 +22,10 @@ func CreateCrontabScript(disableCompression bool, storage string) {
 		disableC = "--disable-compression"
 	}
 
-	var scriptContent string
-
-	if storage == "s3" {
-		scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
+	scriptContent := fmt.Sprintf(`#!/usr/bin/env bash
 set -e
-bkup backup --dbname %s --port %s --storage s3 --path %s %v
-`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), os.Getenv("S3_PATH"), disableC)
-	} else {
-		scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
-set -e
-bkup backup --dbname %s --port %s %v
-`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), disableC)
-	}
+bkup backup --dbname %s --port %s --storage %s %v
+`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), storage, disableC)
 
 	if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil {
 		utils.Fatal("Error writing to %s: %v\n", backupCronFile, err)
diff --git a/utils/logger.go b/utils/logger.go
index 05f551c..7059e3b 100644
--- a/utils/logger.go
+++ b/utils/logger.go
@@ -43,6 +43,7 @@ func Done(msg string, args ...any) {
 	}
 }
 
+// Fatal logs an error message and exits the program
 func Fatal(msg string, args ...any) {
 	// Fatal logs an error message and exits the program.
 	formattedMessage := fmt.Sprintf(msg, args...)
diff --git a/utils/s3.go b/utils/s3.go
index f632449..a9513ee 100644
--- a/utils/s3.go
+++ b/utils/s3.go
@@ -42,8 +42,7 @@ func CreateSession() (*session.Session, error) {
 
 	err = CheckEnvVars(awsVars)
 	if err != nil {
-		Error(fmt.Sprintf("Error checking environment variables\n: %s", err))
-		os.Exit(1)
+		Fatal("Error checking environment variables\n: %s", err)
 	}
 	// S3 Config
 	s3Config := &aws.Config{
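
A note on the lookup pattern this patch leans on: `s3Backup` now resolves the S3 path via `utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")`, i.e. a preferred variable name plus a legacy fallback. The helper's implementation is not part of this diff; the sketch below is a minimal stand-in, assuming it simply prefers the first name and falls back to the second when the first is unset.

```go
package main

import (
	"fmt"
	"os"
)

// getEnvVariable is a stand-in for utils.GetEnvVariable (not shown in this
// diff): return envName's value if set, otherwise fall back to oldEnvName.
func getEnvVariable(envName, oldEnvName string) string {
	if value := os.Getenv(envName); value != "" {
		return value
	}
	// Legacy variable name, kept for backward compatibility.
	return os.Getenv(oldEnvName)
}

func main() {
	// Only the legacy name is set, as an older deployment would have it.
	os.Setenv("S3_PATH", "/custom-path")
	fmt.Println(getEnvVariable("AWS_S3_PATH", "S3_PATH")) // prints /custom-path
}
```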
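Likewise, since `CreateCrontabScript` now renders a single template for every backend instead of branching on `"s3"`, it may help to see what the generated cron script looks like. This standalone sketch reuses the exact `fmt.Sprintf` template from the patch; the `DB_NAME`/`DB_PORT` values are illustrative only.

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// Same template as pkg/scripts.go after this patch: the storage backend
	// is interpolated, so s3, local, and ssh all share one code path.
	storage := "ssh"
	disableC := "" // becomes "--disable-compression" when compression is off
	os.Setenv("DB_NAME", "database") // illustrative values
	os.Setenv("DB_PORT", "3306")

	scriptContent := fmt.Sprintf(`#!/usr/bin/env bash
set -e
bkup backup --dbname %s --port %s --storage %s %v
`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), storage, disableC)

	fmt.Print(scriptContent)
	// Prints:
	//   #!/usr/bin/env bash
	//   set -e
	//   bkup backup --dbname database --port 3306 --storage ssh
}
```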