Merge pull request #60 from jkaninda/develop

Develop
Committed by GitHub on 2024-08-04 13:12:50 +02:00
10 changed files with 151 additions and 26 deletions

.github/workflows/build.yml (new file)

@@ -0,0 +1,32 @@
name: Build
on:
  push:
    branches: ['develop']
env:
  BUILDKIT_IMAGE: jkaninda/mysql-bkup
jobs:
  docker:
    runs-on: ubuntu-latest
    steps:
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      -
        name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Build and push
        uses: docker/build-push-action@v3
        with:
          push: true
          file: "./docker/Dockerfile"
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          tags: |
            "${{env.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"


@@ -27,7 +27,7 @@ docker-run-scheduled: docker-build
docker-run-scheduled-s3: docker-build
-	docker run --rm --network web --user 1000:1000 --name mysql-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
+	docker run --rm --network web --name mysql-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
docker-run-s3: docker-build
	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "AWS_S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage s3 --path /custom-path
@@ -37,7 +37,7 @@ docker-restore-s3: docker-build
	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup restore --storage s3 -f ${FILE_NAME} --path /custom-path
docker-run-ssh: docker-build
-	docker run --rm --network web -v "${SSH_IDENTIFY_FILE_LOCAL}:" --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage ssh
+	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" jkaninda/mysql-bkup bkup backup --storage ssh
docker-restore-ssh: docker-build
	docker run --rm --network web --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" jkaninda/mysql-bkup bkup restore --storage ssh -f ${FILE_NAME}


@@ -21,6 +21,7 @@ ENV AWS_S3_BUCKET_NAME=""
ENV AWS_ACCESS_KEY=""
ENV AWS_SECRET_KEY=""
ENV AWS_REGION="us-west-2"
+ENV AWS_S3_PATH=""
ENV AWS_DISABLE_SSL="false"
ENV GPG_PASSPHRASE=""
ENV SSH_USER=""


@@ -32,7 +32,7 @@ services:
    environment:
      - DB_PORT=3306
      - DB_HOST=mysql
-      - DB_NAME=database
+      #- DB_NAME=database
      - DB_USERNAME=username
      - DB_PASSWORD=password
      ## SSH config
@@ -69,7 +69,7 @@ services:
    command:
      - /bin/sh
      - -c
-      - mysql-bkup backup -d database --storage s3 --mode scheduled --period "0 1 * * *"
+      - mysql-bkup backup -d database --storage ssh --mode scheduled --period "0 1 * * *"
    volumes:
      - ./id_ed25519:/tmp/id_ed25519"
    environment:
@@ -117,7 +117,7 @@ spec:
          command:
            - /bin/sh
            - -c
-            - mysql-bkup backup -s s3 --path /custom_path
+            - mysql-bkup backup -s ssh
          env:
            - name: DB_PORT
              value: "3306"
@@ -141,6 +141,6 @@ spec:
            - name: AWS_ACCESS_KEY
              value: "xxxx"
            - name: SSH_IDENTIFY_FILE
-              value: "/home/jkaninda/backups"
+              value: "/tmp/id_ed25519"
-          restartPolicy: OnFailure
+          restartPolicy: Never
```


@@ -48,4 +48,54 @@ services:
      - web
networks:
  web:
```
## Restore on Kubernetes
### Simple Kubernetes CronJob usage:
```yaml
apiVersion: batch/v1
kind: CronJob
metadata:
  name: bkup-job
spec:
  schedule: "0 1 * * *"
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: mysql-bkup
              image: jkaninda/mysql-bkup
              command:
                - /bin/sh
                - -c
                - mysql-bkup restore -s s3 --path /custom_path -f store_20231219_022941.sql.gz
              env:
                - name: DB_PORT
                  value: "3306"
                - name: DB_HOST
                  value: ""
                - name: DB_NAME
                  value: ""
                - name: DB_USERNAME
                  value: ""
                # Please use secret!
                - name: DB_PASSWORD
                  value: ""
                - name: AWS_S3_ENDPOINT
                  value: "https://s3.amazonaws.com"
                - name: AWS_S3_BUCKET_NAME
                  value: "xxx"
                - name: AWS_REGION
                  value: "us-west-2"
                - name: AWS_ACCESS_KEY
                  value: "xxxx"
                - name: AWS_SECRET_KEY
                  value: "xxxx"
                - name: AWS_DISABLE_SSL
                  value: "false"
          restartPolicy: OnFailure
```


@@ -47,4 +47,52 @@ services:
      - web
networks:
  web:
```
## Restore on Kubernetes
Simple Kubernetes CronJob usage:
```yaml
apiVersion: batch/v1
kind: CronJob
metadata:
  name: bkup-job
spec:
  schedule: "0 1 * * *"
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: mysql-bkup
              image: jkaninda/mysql-bkup
              command:
                - /bin/sh
                - -c
                - mysql-bkup restore -s ssh -f store_20231219_022941.sql.gz
              env:
                - name: DB_PORT
                  value: "3306"
                - name: DB_HOST
                  value: ""
                - name: DB_NAME
                  value: ""
                - name: DB_USERNAME
                  value: ""
                # Please use secret!
                - name: DB_PASSWORD
                  value: ""
                - name: SSH_HOST_NAME
                  value: ""
                - name: SSH_PORT
                  value: "22"
                - name: SSH_USER
                  value: "xxx"
                - name: SSH_REMOTE_PATH
                  value: "/home/jkaninda/backups"
                - name: AWS_ACCESS_KEY
                  value: "xxxx"
                - name: SSH_IDENTIFY_FILE
                  value: "/tmp/id_ed25519"
          restartPolicy: Never
```


@@ -25,7 +25,6 @@ func StartBackup(cmd *cobra.Command) {
	utils.GetEnv(cmd, "period", "SCHEDULE_PERIOD")
	//Get flag value and set env
-	s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
	remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
	storage = utils.GetEnv(cmd, "storage", "STORAGE")
	file = utils.GetEnv(cmd, "file", "FILE_NAME")
@@ -35,6 +34,8 @@ func StartBackup(cmd *cobra.Command) {
	executionMode, _ = cmd.Flags().GetString("mode")
	dbName = os.Getenv("DB_NAME")
	gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
+	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
	//
	if gpqPassphrase != "" {
		encryption = true
@@ -49,7 +50,7 @@ func StartBackup(cmd *cobra.Command) {
	if executionMode == "default" {
		switch storage {
		case "s3":
-			s3Backup(backupFileName, s3Path, disableCompression, prune, backupRetention, encryption)
+			s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
		case "local":
			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
		case "ssh", "remote":
@@ -61,7 +62,7 @@ func StartBackup(cmd *cobra.Command) {
		}
	} else if executionMode == "scheduled" {
-		scheduledMode()
+		scheduledMode(storage)
	} else {
		utils.Fatal("Error, unknown execution mode!")
	}
@@ -69,7 +70,7 @@ func StartBackup(cmd *cobra.Command) {
}
// Run in scheduled mode
-func scheduledMode() {
+func scheduledMode(storage string) {
	fmt.Println()
	fmt.Println("**********************************")
@@ -77,6 +78,7 @@ func scheduledMode() {
fmt.Println("***********************************") fmt.Println("***********************************")
utils.Info("Running in Scheduled mode") utils.Info("Running in Scheduled mode")
utils.Info("Execution period %s", os.Getenv("SCHEDULE_PERIOD")) utils.Info("Execution period %s", os.Getenv("SCHEDULE_PERIOD"))
utils.Info("Storage type %s ", storage)
//Test database connexion //Test database connexion
utils.TestDatabaseConnection() utils.TestDatabaseConnection()
@@ -203,8 +205,9 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac
	}
}
-func s3Backup(backupFileName string, s3Path string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
+func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
+	s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
	utils.Info("Backup database to s3 storage")
	//Backup database
	BackupDatabase(backupFileName, disableCompression)
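Two utils helpers carry this change but are not part of the diff: utils.GetEnv, called purely for its side effect on the added `_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")` line, and utils.GetEnvVariable, the new AWS_S3_PATH/S3_PATH fallback inside s3Backup. A minimal, hypothetical sketch of what they plausibly do; the real implementations live in the project's utils package and may differ:

```go
package utils

import (
	"os"

	"github.com/spf13/cobra"
)

// GetEnv reads a cobra flag and, when set, exports it as an environment
// variable so later code can rely on the env var alone. This side effect
// would explain why StartBackup keeps the call while discarding its result.
func GetEnv(cmd *cobra.Command, flagName, envName string) string {
	value, _ := cmd.Flags().GetString(flagName)
	if value != "" {
		os.Setenv(envName, value)
	}
	return os.Getenv(envName)
}

// GetEnvVariable returns the first non-empty of two environment variable
// names, letting AWS_S3_PATH fall back to the legacy S3_PATH name.
func GetEnvVariable(name, fallback string) string {
	if v := os.Getenv(name); v != "" {
		return v
	}
	return os.Getenv(fallback)
}
```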


@@ -22,19 +22,10 @@ func CreateCrontabScript(disableCompression bool, storage string) {
disableC = "--disable-compression" disableC = "--disable-compression"
} }
var scriptContent string scriptContent := fmt.Sprintf(`#!/usr/bin/env bash
if storage == "s3" {
scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
set -e set -e
bkup backup --dbname %s --port %s --storage s3 --path %s %v bkup backup --dbname %s --port %s --storage %s %v
`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), os.Getenv("S3_PATH"), disableC) `, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), storage, disableC)
} else {
scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
set -e
bkup backup --dbname %s --port %s %v
`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), disableC)
}
if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil { if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil {
utils.Fatal("Error writing to %s: %v\n", backupCronFile, err) utils.Fatal("Error writing to %s: %v\n", backupCronFile, err)


@@ -43,6 +43,7 @@ func Done(msg string, args ...any) {
	}
}
+// Fatal logs an error message and exits the program
func Fatal(msg string, args ...any) {
	// Fatal logs an error message and exits the program.
	formattedMessage := fmt.Sprintf(msg, args...)
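The hunk cuts off before Fatal's body ends. Presumably it logs the formatted message and terminates the process, which is what lets CreateSession in the next file drop its explicit os.Exit(1). A hypothetical completion under that assumption; the real logger likely adds level and timestamp formatting:

```go
package utils

import (
	"fmt"
	"os"
)

// Fatal logs an error message and exits the program.
// Hypothetical completion; only the Sprintf line appears in the diff.
func Fatal(msg string, args ...any) {
	formattedMessage := fmt.Sprintf(msg, args...)
	fmt.Fprintf(os.Stderr, "ERROR: %s\n", formattedMessage)
	os.Exit(1)
}
```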


@@ -42,8 +42,7 @@ func CreateSession() (*session.Session, error) {
	err = CheckEnvVars(awsVars)
	if err != nil {
-		Error(fmt.Sprintf("Error checking environment variables\n: %s", err))
-		os.Exit(1)
+		Fatal("Error checking environment variables\n: %s", err)
	}
	// S3 Config
	s3Config := &aws.Config{