Compare commits

...

18 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| | 4c44166921 | Merge pull request #113 from jkaninda/develop (Develop) | 2024-10-09 12:51:15 +02:00 |
| | 554df819ab | Merge pull request #112 from jkaninda/multi-backup (docs: add mutli database backup example) | 2024-10-09 12:49:46 +02:00 |
| Jonas Kaninda | ca5633882e | docs: add mutli database backup example | 2024-10-09 12:45:55 +02:00 |
| | c5cca82841 | Merge pull request #111 from jkaninda/multi-backup (Add Multi database backup) | 2024-10-09 12:24:37 +02:00 |
| Jonas Kaninda | bbd5422089 | ci: change Dockerfile path | 2024-10-09 12:23:45 +02:00 |
| Jonas Kaninda | d72156f890 | feat: add multi database backup | 2024-10-09 12:23:14 +02:00 |
| Jonas Kaninda | 909a50dbe7 | docs: update backup encryption example | 2024-10-08 23:20:50 +02:00 |
| Jonas Kaninda | 94ceb71da2 | docs: update backup encryption example | 2024-10-08 23:05:10 +02:00 |
| Jonas Kaninda | fe05fe5110 | feat: add encrypt backup using public key, migrate gpg to go gpg dependency | 2024-10-08 23:02:46 +02:00 |
| | dabba2050a | Merge pull request #110 from jkaninda/refactor (chore: remove os.kill.signal) | 2024-10-05 10:42:55 +02:00 |
| Jonas Kaninda | 47e1ac407b | chore: remove os.kill.signal | 2024-10-05 10:41:46 +02:00 |
| | 28f6ed3a82 | Merge pull request #109 from jkaninda/refactor (fix: logging time) | 2024-10-05 10:40:11 +02:00 |
| Jonas Kaninda | 504926c7cd | fix: logging time | 2024-10-05 10:39:49 +02:00 |
| | 737f473f92 | Merge pull request #108 from jkaninda/refactor (Refactor) | 2024-10-03 18:19:12 +02:00 |
| Jonas Kaninda | 300d2a8205 | chore: remove testDatabaseConnection function for scheduled mode | 2024-10-03 18:18:47 +02:00 |
| Jonas Kaninda | a4ad0502cf | chore: add storage type alt for smallcase and uppercase | 2024-10-03 18:17:48 +02:00 |
| | f344867edf | Merge pull request #107 from jkaninda/refactor (docs: update configuration reference) | 2024-10-02 04:26:05 +02:00 |
| Jonas Kaninda | d774584f64 | docs: update configuration reference | 2024-10-02 04:25:35 +02:00 |
17 changed files with 668 additions and 169 deletions

View File

@@ -25,7 +25,7 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           push: true
-          file: "./docker/Dockerfile"
+          file: "./Dockerfile"
           platforms: linux/amd64,linux/arm64,linux/arm/v7
           build-args: |
             appVersion=develop-${{ github.sha }}

View File

@@ -39,7 +39,7 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           push: true
-          file: "./docker/Dockerfile"
+          file: "./Dockerfile"
           platforms: linux/amd64,linux/arm64,linux/arm/v7
           build-args: |
             appVersion=${{ env.TAG_NAME }}

View File

@@ -52,7 +52,7 @@ ENV VERSION=${appVersion}
 LABEL author="Jonas Kaninda"
 LABEL version=${appVersion}
-RUN apk --update add --no-cache mysql-client mariadb-connector-c gnupg tzdata
+RUN apk --update add --no-cache mysql-client mariadb-connector-c tzdata
 RUN mkdir $WORKDIR
 RUN mkdir $BACKUPDIR
 RUN mkdir -p $BACKUP_TMP_DIR

View File

@@ -0,0 +1,6 @@
---
title: Update deprecated configurations
layout: default
parent: How Tos
nav_order: 11
---

View File

@@ -1,30 +1,39 @@
 ---
-title: Encrypt backups using GPG
+title: Encrypt backups
 layout: default
 parent: How Tos
 nav_order: 8
 ---
 # Encrypt backup
-The image supports encrypting backups using GPG out of the box. In case a `GPG_PASSPHRASE` environment variable is set, the backup archive will be encrypted using the given key and saved as a sql.gpg file instead or sql.gz.gpg.
+The image supports encrypting backups using one of two available methods: GPG with passphrase or GPG with a public key.
+The image supports encrypting backups using GPG out of the box. In case a `GPG_PASSPHRASE` or `GPG_PUBLIC_KEY` environment variable is set, the backup archive will be encrypted using the given key and saved as a sql.gpg file instead or sql.gz.gpg.
 {: .warning }
-To restore an encrypted backup, you need to provide the same GPG passphrase or key used during backup process.
+To restore an encrypted backup, you need to provide the same GPG passphrase used during backup process.
 - GPG home directory `/config/gnupg`
 - Cipher algorithm `aes256`
-
-### Decrypt backup
+{: .note }
+The backup encrypted using `GPG passphrase` method can be restored automatically, no need to decrypt it before restoration.
+Suppose you used a GPG public key during the backup process. In that case, you need to decrypt your backup before restoration because decryption using a `GPG private` key is not fully supported.
 To decrypt manually, you need to install `gnupg`
 ```shell
 gpg --batch --passphrase "my-passphrase" \
 --output database_20240730_044201.sql.gz \
 --decrypt database_20240730_044201.sql.gz.gpg
 ```
+Using your private key
+```shell
+gpg --output database_20240730_044201.sql.gz --decrypt database_20240730_044201.sql.gz.gpg
+```
-### Backup
+## Using GPG passphrase
 ```yml
 services:
@@ -51,4 +60,32 @@ services:
       - web
 networks:
   web:
+```
+## Using GPG Public Key
+```yml
+services:
+  mysql-bkup:
+    # In production, it is advised to lock your image tag to a proper
+    # release version instead of using `latest`.
+    # Check https://github.com/jkaninda/mysql-bkup/releases
+    # for a list of available releases.
+    image: jkaninda/mysql-bkup
+    container_name: mysql-bkup
+    command: backup -d database
+    volumes:
+      - ./backup:/backup
+    environment:
+      - DB_PORT=3306
+      - DB_HOST=mysql
+      - DB_NAME=database
+      - DB_USERNAME=username
+      - DB_PASSWORD=password
+      ## Required to encrypt backup
+      - GPG_PUBLIC_KEY=/config/public_key.asc
+    # mysql-bkup container must be connected to the same network with your database
+    networks:
+      - web
+networks:
+  web:
 ```
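
The new `GPG_PUBLIC_KEY` option in the compose example above expects an ASCII-armored public key mounted into the container (the example points it at `/config/public_key.asc`). A minimal sketch of producing such a key with stock GnuPG; the identity `backup@example.com` and the output file name are placeholders:

```shell
# Generate a key pair dedicated to backups (identity is a placeholder)
gpg --quick-generate-key "backup@example.com"

# Export the ASCII-armored public key to mount into the container
gpg --export --armor backup@example.com > public_key.asc
```

The matching private key stays outside the container and is only needed to decrypt archives manually before a restore.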

View File

@@ -0,0 +1,63 @@
---
title: Run multiple backup schedules in the same container
layout: default
parent: How Tos
nav_order: 11
---
Multiple backup schedules with different configurations can be set up by mounting a configuration file into `/config/config.yaml` or `/config/config.yml`, or by defining the environment variable `BACKUP_CONFIG_FILE=/backup/config.yaml`.
## Configuration file
```yaml
#cronExpression: "@every 20m" //Optional for scheduled backups
cronExpression: ""
databases:
  - host: mysql1
    port: 3306
    name: database1
    user: database1
    password: password
    path: /s3-path/database1 #For SSH or FTP you need to define the full path (/home/toto/backup/)
  - host: mysql2
    port: 3306
    name: lldap
    user: lldap
    password: password
    path: /s3-path/lldap #For SSH or FTP you need to define the full path (/home/toto/backup/)
  - host: mysql3
    port: 3306
    name: keycloak
    user: keycloak
    password: password
    path: /s3-path/keycloak #For SSH or FTP you need to define the full path (/home/toto/backup/)
  - host: mysql4
    port: 3306
    name: joplin
    user: joplin
    password: password
    path: /s3-path/joplin #For SSH or FTP you need to define the full path (/home/toto/backup/)
```
## Docker compose file
```yaml
services:
  mysql-bkup:
    # In production, it is advised to lock your image tag to a proper
    # release version instead of using `latest`.
    # Check https://github.com/jkaninda/mysql-bkup/releases
    # for a list of available releases.
    image: jkaninda/mysql-bkup
    container_name: mysql-bkup
    command: backup
    volumes:
      - ./backup:/backup
    environment:
      ## Multi backup config file
      - BACKUP_CONFIG_FILE=/backup/config.yaml
    # mysql-bkup container must be connected to the same network with your database
    networks:
      - web
networks:
  web:
```
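
Assuming the configuration above is saved as `./backup/config.yaml` (so it is visible at `/backup/config.yaml` inside the container) and the compose file sits next to it, a quick way to exercise the multi-database setup is:

```shell
# Start the backup container defined in the compose file above
docker compose up -d

# Watch each configured database being processed
docker compose logs -f mysql-bkup

# Local archives land in the mounted ./backup directory
ls -lh ./backup
```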

View File

@@ -34,40 +34,42 @@ Backup, restore and migrate targets, schedule and retention are configured using
 ## Environment variables
 | Name | Requirement | Description |
 |------|-------------|-------------|
 | DB_PORT | Optional, default 3306 | Database port number |
 | DB_HOST | Required | Database host |
 | DB_NAME | Optional if it was provided from the -d flag | Database name |
 | DB_USERNAME | Required | Database user name |
 | DB_PASSWORD | Required | Database password |
 | AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
 | AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
 | AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
 | AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
 | AWS_REGION | Optional, required for S3 storage | AWS Region |
 | AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
+| AWS_FORCE_PATH_STYLE | Optional, required for S3 storage | Force path style |
 | FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
 | GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
+| GPG_PUBLIC_KEY | Optional, required to encrypt backup | GPG public key, used to encrypt backup (/config/public_key.asc) |
 | BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode |
-| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip |
+| SSH_HOST | Optional, required for SSH storage | ssh remote hostname or ip |
 | SSH_USER | Optional, required for SSH storage | ssh remote user |
 | SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
 | SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
 | SSH_PORT | Optional, required for SSH storage | ssh remote server port |
 | REMOTE_PATH | Optional, required for SSH or FTP storage | remote path (/home/toto/backup) |
-| FTP_HOST_NAME | Optional, required for FTP storage | FTP host name |
+| FTP_HOST | Optional, required for FTP storage | FTP host name |
 | FTP_PORT | Optional, required for FTP storage | FTP server port number |
 | FTP_USER | Optional, required for FTP storage | FTP user |
 | FTP_PASSWORD | Optional, required for FTP storage | FTP user password |
 | TARGET_DB_HOST | Optional, required for database migration | Target database host |
 | TARGET_DB_PORT | Optional, required for database migration | Target database port |
 | TARGET_DB_NAME | Optional, required for database migration | Target database name |
 | TARGET_DB_USERNAME | Optional, required for database migration | Target database username |
 | TARGET_DB_PASSWORD | Optional, required for database migration | Target database password |
 | TG_TOKEN | Optional, required for Telegram notification | Telegram token (`BOT-ID:BOT-TOKEN`) |
 | TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID |
 | TZ | Optional | Time Zone |
 ---
 ## Run in Scheduled mode
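
For a one-off run against the renamed SSH variables, a hedged sketch; the host, credentials and network name are placeholders, the image and `backup -d database` command follow the compose examples elsewhere in this changeset, and the `STORAGE` selection mirrors the `storage` option read in `config.go`:

```shell
docker run --rm --network web \
  -e DB_HOST=mysql -e DB_PORT=3306 -e DB_NAME=database \
  -e DB_USERNAME=username -e DB_PASSWORD=password \
  -e SSH_HOST=backup.example.com -e SSH_PORT=22 \
  -e SSH_USER=toto -e SSH_PASSWORD=password \
  -e REMOTE_PATH=/home/toto/backup \
  -e STORAGE=ssh \
  jkaninda/mysql-bkup backup -d database
```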

go.mod (11 changes)
View File

@@ -5,21 +5,28 @@ go 1.22.5
 require github.com/spf13/pflag v1.0.5

 require (
+	github.com/ProtonMail/gopenpgp/v2 v2.7.5
 	github.com/aws/aws-sdk-go v1.55.3
 	github.com/bramvdbogaerde/go-scp v1.5.0
 	github.com/hpcloud/tail v1.0.0
+	github.com/jlaffaye/ftp v0.2.0
+	github.com/robfig/cron/v3 v3.0.1
 	github.com/spf13/cobra v1.8.0
 	golang.org/x/crypto v0.18.0
+	gopkg.in/yaml.v3 v3.0.1
 )

 require (
+	github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 // indirect
+	github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect
+	github.com/cloudflare/circl v1.3.3 // indirect
 	github.com/hashicorp/errwrap v1.1.0 // indirect
 	github.com/hashicorp/go-multierror v1.1.1 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
-	github.com/jlaffaye/ftp v0.2.0 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
-	github.com/robfig/cron/v3 v3.0.1 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
 	golang.org/x/sys v0.22.0 // indirect
+	golang.org/x/text v0.14.0 // indirect
 	gopkg.in/fsnotify.v1 v1.4.7 // indirect
 	gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
 )

go.sum (58 changes)
View File

@@ -1,12 +1,22 @@
+github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs=
+github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
+github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k=
+github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw=
+github.com/ProtonMail/gopenpgp/v2 v2.7.5 h1:STOY3vgES59gNgoOt2w0nyHBjKViB/qSg7NjbQWPJkA=
+github.com/ProtonMail/gopenpgp/v2 v2.7.5/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g=
 github.com/aws/aws-sdk-go v1.55.3 h1:0B5hOX+mIx7I5XPOrjrHlKSDQV/+ypFZpIHOx5LOk3E=
 github.com/aws/aws-sdk-go v1.55.3/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
 github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU=
 github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
 github.com/bramvdbogaerde/go-scp v1.5.0 h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM=
 github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ=
+github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
+github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs=
+github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA=
 github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
 github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -21,6 +31,8 @@ github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uT
 github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
 github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
 github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
 github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
@@ -32,16 +44,62 @@ github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3k
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
 golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
 golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
 golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
 golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
 golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -20,18 +20,23 @@ import (
 func StartBackup(cmd *cobra.Command) {
 	intro()
-	dbConf = initDbConfig(cmd)
 	//Initialize backup configs
 	config := initBackupConfig(cmd)
-	if config.cronExpression == "" {
-		BackupTask(dbConf, config)
-	} else {
-		if utils.IsValidCronExpression(config.cronExpression) {
-			scheduledMode(dbConf, config)
-		} else {
-			utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
+	//Load backup configuration file
+	configFile, err := loadConfigFile()
+	if err != nil {
+		dbConf = initDbConfig(cmd)
+		if config.cronExpression == "" {
+			BackupTask(dbConf, config)
+		} else {
+			if utils.IsValidCronExpression(config.cronExpression) {
+				scheduledMode(dbConf, config)
+			} else {
+				utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
+			}
 		}
+	} else {
+		startMultiBackup(config, configFile)
 	}
 }
@@ -42,8 +47,6 @@ func scheduledMode(db *dbConfig, config *BackupConfig) {
 	utils.Info("Backup cron expression: %s", config.cronExpression)
 	utils.Info("Storage type %s ", config.storage)
-	//Test database connexion
-	testDatabaseConnection(db)
 	//Test backup
 	utils.Info("Testing backup configurations...")
 	BackupTask(db, config)
@@ -66,7 +69,8 @@ func scheduledMode(db *dbConfig, config *BackupConfig) {
 	select {}
 }
 func BackupTask(db *dbConfig, config *BackupConfig) {
-	//Generate backup file name
+	utils.Info("Starting backup task...")
+	//Generate file name
 	backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20060102_150405"))
 	if config.disableCompression {
 		backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405"))
@@ -75,37 +79,92 @@ func BackupTask(db *dbConfig, config *BackupConfig) {
 	switch config.storage {
 	case "local":
 		localBackup(db, config)
-	case "s3":
+	case "s3", "S3":
 		s3Backup(db, config)
-	case "ssh", "remote":
+	case "ssh", "SSH", "remote":
 		sshBackup(db, config)
-	case "ftp":
+	case "ftp", "FTP":
 		ftpBackup(db, config)
-		//utils.Fatal("Not supported storage type: %s", config.storage)
 	default:
 		localBackup(db, config)
 	}
 }
+func multiBackupTask(databases []Database, bkConfig *BackupConfig) {
+	for _, db := range databases {
+		//Check if path is defined in config file
+		if db.Path != "" {
+			bkConfig.remotePath = db.Path
+		}
+		BackupTask(getDatabase(db), bkConfig)
+	}
+}
+func startMultiBackup(bkConfig *BackupConfig, configFile string) {
+	utils.Info("Starting multiple backup jobs...")
+	var conf = &Config{}
+	conf, err := readConf(configFile)
+	if err != nil {
+		utils.Fatal("Error reading config file: %s", err)
+	}
+	//Check if cronExpression is defined in config file
+	if conf.CronExpression != "" {
+		bkConfig.cronExpression = conf.CronExpression
+	}
+	// Check if cronExpression is defined
+	if bkConfig.cronExpression == "" {
+		multiBackupTask(conf.Databases, bkConfig)
+	} else {
+		// Check if cronExpression is valid
+		if utils.IsValidCronExpression(bkConfig.cronExpression) {
+			utils.Info("Running MultiBackup in Scheduled mode")
+			utils.Info("Backup cron expression: %s", bkConfig.cronExpression)
+			utils.Info("Storage type %s ", bkConfig.storage)
+			//Test backup
+			utils.Info("Testing backup configurations...")
+			multiBackupTask(conf.Databases, bkConfig)
+			utils.Info("Testing backup configurations...done")
+			utils.Info("Creating multi backup job...")
+			// Create a new cron instance
+			c := cron.New()
+			_, err := c.AddFunc(bkConfig.cronExpression, func() {
+				// Create a channel
+				multiBackupTask(conf.Databases, bkConfig)
+			})
+			if err != nil {
+				return
+			}
+			// Start the cron scheduler
+			c.Start()
+			utils.Info("Creating multi backup job...done")
+			utils.Info("Backup job started")
+			defer c.Stop()
+			select {}
+		} else {
+			utils.Fatal("Cron expression is not valid: %s", bkConfig.cronExpression)
+		}
+	}
+}
 // BackupDatabase backup database
 func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) {
 	storagePath = os.Getenv("STORAGE_PATH")
+	err := utils.CheckEnvVars(dbHVars)
+	if err != nil {
+		utils.Error("Please make sure all required environment variables for database are set")
+		utils.Fatal("Error checking environment variables: %s", err)
+	}
 	utils.Info("Starting database backup...")
-	err := os.Setenv("MYSQL_PWD", db.dbPassword)
+	err = os.Setenv("MYSQL_PWD", db.dbPassword)
 	if err != nil {
 		return
 	}
 	testDatabaseConnection(db)
 	// Backup Database database
 	utils.Info("Backing up database...")
-	// Verify is compression is disabled
 	if disableCompression {
 		// Execute mysqldump
 		cmd := exec.Command("mysqldump",
@@ -120,7 +179,7 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
 		}
 		// save output
-		file, err := os.Create(fmt.Sprintf("%s/%s", tmpPath, backupFileName))
+		file, err := os.Create(filepath.Join(tmpPath, backupFileName))
 		if err != nil {
 			log.Fatal(err)
 		}
@@ -141,7 +200,7 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
 		}
 		gzipCmd := exec.Command("gzip")
 		gzipCmd.Stdin = stdout
-		gzipCmd.Stdout, err = os.Create(fmt.Sprintf("%s/%s", tmpPath, backupFileName))
+		gzipCmd.Stdout, err = os.Create(filepath.Join(tmpPath, backupFileName))
 		gzipCmd.Start()
 		if err != nil {
 			log.Fatal(err)
@@ -162,9 +221,10 @@ func localBackup(db *dbConfig, config *BackupConfig) {
 	BackupDatabase(db, config.backupFileName, disableCompression)
 	finalFileName := config.backupFileName
 	if config.encryption {
-		encryptBackup(config.backupFileName, config.passphrase)
+		encryptBackup(config)
 		finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, gpgExtension)
 	}
 	utils.Info("Backup name is %s", finalFileName)
 	moveToBackup(finalFileName, storagePath)
 	//Send notification
@@ -175,6 +235,7 @@ func localBackup(db *dbConfig, config *BackupConfig) {
 	}
 	//Delete temp
 	deleteTemp()
+	utils.Info("Backup completed successfully")
 }
 func s3Backup(db *dbConfig, config *BackupConfig) {
@@ -185,14 +246,15 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
 	BackupDatabase(db, config.backupFileName, disableCompression)
 	finalFileName := config.backupFileName
 	if config.encryption {
-		encryptBackup(config.backupFileName, config.passphrase)
+		encryptBackup(config)
 		finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
 	}
 	utils.Info("Uploading backup archive to remote storage S3 ... ")
 	utils.Info("Backup name is %s", finalFileName)
 	err := UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
 	if err != nil {
-		utils.Fatal("Error uploading file to S3: %s ", err)
+		utils.Fatal("Error uploading backup archive to S3: %s ", err)
 	}
@@ -214,16 +276,16 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
 	utils.NotifySuccess(finalFileName)
 	//Delete temp
 	deleteTemp()
+	utils.Info("Backup completed successfully")
 }
-
-// sshBackup backup database to SSH remote server
 func sshBackup(db *dbConfig, config *BackupConfig) {
 	utils.Info("Backup database to Remote server")
 	//Backup database
 	BackupDatabase(db, config.backupFileName, disableCompression)
 	finalFileName := config.backupFileName
 	if config.encryption {
-		encryptBackup(config.backupFileName, config.passphrase)
+		encryptBackup(config)
 		finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
 	}
 	utils.Info("Uploading backup archive to remote storage ... ")
@@ -237,7 +299,7 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
 	//Delete backup file from tmp folder
 	err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
 	if err != nil {
-		fmt.Println("Error deleting file: ", err)
+		utils.Error("Error deleting file: %v", err)
 	}
 	if config.prune {
@@ -251,6 +313,8 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
 	utils.NotifySuccess(finalFileName)
 	//Delete temp
 	deleteTemp()
+	utils.Info("Backup completed successfully")
 }
 func ftpBackup(db *dbConfig, config *BackupConfig) {
 	utils.Info("Backup database to the remote FTP server")
@@ -258,7 +322,7 @@ func ftpBackup(db *dbConfig, config *BackupConfig) {
 	BackupDatabase(db, config.backupFileName, disableCompression)
 	finalFileName := config.backupFileName
 	if config.encryption {
-		encryptBackup(config.backupFileName, config.passphrase)
+		encryptBackup(config)
 		finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
 	}
 	utils.Info("Uploading backup archive to the remote FTP server ... ")
@@ -286,13 +350,22 @@ func ftpBackup(db *dbConfig, config *BackupConfig) {
 	utils.NotifySuccess(finalFileName)
 	//Delete temp
 	deleteTemp()
+	utils.Info("Backup completed successfully")
 }
-// encryptBackup encrypt backup
-func encryptBackup(backupFileName, passphrase string) {
-	err := Encrypt(filepath.Join(tmpPath, backupFileName), passphrase)
-	if err != nil {
-		utils.Fatal("Error during encrypting backup %s", err)
+func encryptBackup(config *BackupConfig) {
+	if config.usingKey {
+		err := encryptWithGPGPublicKey(filepath.Join(tmpPath, config.backupFileName), config.publicKey)
+		if err != nil {
+			utils.Fatal("error during encrypting backup %v", err)
+		}
+	} else if config.passphrase != "" {
+		err := encryptWithGPG(filepath.Join(tmpPath, config.backupFileName), config.passphrase)
+		if err != nil {
+			utils.Fatal("error during encrypting backup %v", err)
+		}
 	}
 }

View File

@@ -14,7 +14,17 @@ import (
 	"strconv"
 )
+type Database struct {
+	Host     string `yaml:"host"`
+	Port     string `yaml:"port"`
+	Name     string `yaml:"name"`
+	User     string `yaml:"user"`
+	Password string `yaml:"password"`
+	Path     string `yaml:"path"`
+}
 type Config struct {
+	Databases      []Database `yaml:"databases"`
+	CronExpression string     `yaml:"cronExpression"`
 }
 type dbConfig struct {
@@ -40,9 +50,11 @@ type BackupConfig struct {
 	backupRetention    int
 	disableCompression bool
 	prune              bool
-	encryption         bool
 	remotePath         string
+	encryption         bool
+	usingKey           bool
 	passphrase         string
+	publicKey          string
 	storage            string
 	cronExpression     string
 }
@@ -90,6 +102,16 @@ func initDbConfig(cmd *cobra.Command) *dbConfig {
 	return &dConf
 }
+func getDatabase(database Database) *dbConfig {
+	return &dbConfig{
+		dbHost:     database.Host,
+		dbPort:     database.Port,
+		dbName:     database.Name,
+		dbUserName: database.User,
+		dbPassword: database.Password,
+	}
+}
 // loadSSHConfig loads the SSH configuration from environment variables
 func loadSSHConfig() (*SSHConfig, error) {
 	utils.GetEnvVariable("SSH_HOST", "SSH_HOST_NAME")
@@ -163,10 +185,14 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
 	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
 	cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
-	if passphrase != "" {
+	publicKeyFile, err := checkPubKeyFile(os.Getenv("GPG_PUBLIC_KEY"))
+	if err == nil {
 		encryption = true
+		usingKey = true
+	} else if passphrase != "" {
+		encryption = true
+		usingKey = false
 	}
 	//Initialize backup configs
 	config := BackupConfig{}
 	config.backupRetention = backupRetention
@@ -176,17 +202,21 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
 	config.encryption = encryption
 	config.remotePath = remotePath
 	config.passphrase = passphrase
+	config.publicKey = publicKeyFile
+	config.usingKey = usingKey
 	config.cronExpression = cronExpression
 	return &config
 }
 type RestoreConfig struct {
 	s3Path     string
 	remotePath string
 	storage    string
 	file       string
 	bucket     string
-	gpqPassphrase string
+	usingKey   bool
+	passphrase string
+	privateKey string
 }
 func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
@@ -199,7 +229,14 @@ func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
 	storage = utils.GetEnv(cmd, "storage", "STORAGE")
 	file = utils.GetEnv(cmd, "file", "FILE_NAME")
 	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
-	gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
+	passphrase := os.Getenv("GPG_PASSPHRASE")
+	privateKeyFile, err := checkPrKeyFile(os.Getenv("GPG_PRIVATE_KEY"))
+	if err == nil {
+		usingKey = true
+	} else if passphrase != "" {
+		usingKey = false
+	}
 	//Initialize restore configs
 	rConfig := RestoreConfig{}
 	rConfig.s3Path = s3Path
@@ -208,7 +245,9 @@ func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
 	rConfig.bucket = bucket
 	rConfig.file = file
 	rConfig.storage = storage
-	rConfig.gpqPassphrase = gpqPassphrase
+	rConfig.passphrase = passphrase
+	rConfig.usingKey = usingKey
+	rConfig.privateKey = privateKeyFile
 	return &rConfig
 }
 func initTargetDbConfig() *targetDbConfig {
@@ -226,3 +265,10 @@ func initTargetDbConfig() *targetDbConfig {
 	}
 	return &tdbConfig
 }
+func loadConfigFile() (string, error) {
+	backupConfigFile, err := checkConfigFile(os.Getenv("BACKUP_CONFIG_FILE"))
+	if err == nil {
+		return backupConfigFile, nil
+	}
+	return "", fmt.Errorf("backup config file not found")
+}

View File

@@ -7,54 +7,173 @@
 package pkg
 import (
+	"errors"
+	"fmt"
+	"github.com/ProtonMail/gopenpgp/v2/crypto"
 	"github.com/jkaninda/mysql-bkup/utils"
 	"os"
-	"os/exec"
 	"strings"
 )
-func Decrypt(inputFile string, passphrase string) error {
-	utils.Info("Decrypting backup file: " + inputFile + " ...")
-	//Create gpg home dir
-	err := utils.MakeDirAll(gpgHome)
-	if err != nil {
-		return err
-	}
-	utils.SetEnv("GNUPGHOME", gpgHome)
-	cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--output", RemoveLastExtension(inputFile), "--decrypt", inputFile)
-	cmd.Stdout = os.Stdout
-	cmd.Stderr = os.Stderr
-	err = cmd.Run()
-	if err != nil {
-		return err
-	}
+// decryptWithGPG decrypts backup file using a passphrase
+func decryptWithGPG(inputFile string, passphrase string) error {
+	utils.Info("Decrypting backup using passphrase...")
+	// Read the encrypted file
+	encFileContent, err := os.ReadFile(inputFile)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error reading encrypted file: %s", err))
+	}
+	// Define the passphrase used to encrypt the file
+	_passphrase := []byte(passphrase)
+	// Create a PGP message object from the encrypted file content
+	encryptedMessage := crypto.NewPGPMessage(encFileContent)
+	// Decrypt the message using the passphrase
+	plainMessage, err := crypto.DecryptMessageWithPassword(encryptedMessage, _passphrase)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error decrypting file: %s", err))
+	}
+	// Save the decrypted file (restore it)
+	err = os.WriteFile(RemoveLastExtension(inputFile), plainMessage.GetBinary(), 0644)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error saving decrypted file: %s", err))
+	}
+	utils.Info("Decrypting backup using passphrase...done")
 	utils.Info("Backup file decrypted successful!")
 	return nil
 }
-func Encrypt(inputFile string, passphrase string) error {
-	utils.Info("Encrypting backup...")
-	//Create gpg home dir
-	err := utils.MakeDirAll(gpgHome)
-	if err != nil {
-		return err
-	}
-	utils.SetEnv("GNUPGHOME", gpgHome)
-	cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--symmetric", "--cipher-algo", algorithm, inputFile)
-	cmd.Stdout = os.Stdout
-	cmd.Stderr = os.Stderr
-	err = cmd.Run()
-	if err != nil {
-		return err
-	}
+// encryptWithGPG encrypts backup using a passphrase
+func encryptWithGPG(inputFile string, passphrase string) error {
+	utils.Info("Encrypting backup using passphrase...")
+	// Read the file to be encrypted
+	plainFileContent, err := os.ReadFile(inputFile)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error reading file: %s", err))
+	}
+	// Define the passphrase to encrypt the file
+	_passphrase := []byte(passphrase)
+	// Create a message object from the file content
+	message := crypto.NewPlainMessage(plainFileContent)
+	// Encrypt the message using the passphrase
+	encryptedMessage, err := crypto.EncryptMessageWithPassword(message, _passphrase)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error encrypting backup file: %s", err))
+	}
+	// Save the encrypted .tar file
+	err = os.WriteFile(fmt.Sprintf("%s.%s", inputFile, gpgExtension), encryptedMessage.GetBinary(), 0644)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error saving encrypted filee: %s", err))
+	}
+	utils.Info("Encrypting backup using passphrase...done")
 	utils.Info("Backup file encrypted successful!")
 	return nil
 }
+// encryptWithGPGPublicKey encrypts backup using a public key
+func encryptWithGPGPublicKey(inputFile string, publicKey string) error {
+	utils.Info("Encrypting backup using public key...")
+	// Read the public key
+	pubKeyBytes, err := os.ReadFile(publicKey)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error reading public key: %s", err))
+	}
+	// Create a new keyring with the public key
+	publicKeyObj, err := crypto.NewKeyFromArmored(string(pubKeyBytes))
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error parsing public key: %s", err))
+	}
+	keyRing, err := crypto.NewKeyRing(publicKeyObj)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error creating key ring: %v", err))
+	}
+	// Read the file to encryptWithGPGPublicKey
+	fileContent, err := os.ReadFile(inputFile)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error reading file: %v", err))
+	}
+	// encryptWithGPG the file
+	message := crypto.NewPlainMessage(fileContent)
+	encMessage, err := keyRing.Encrypt(message, nil)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error encrypting file: %v", err))
+	}
+	// Save the encrypted file
+	err = os.WriteFile(fmt.Sprintf("%s.%s", inputFile, gpgExtension), encMessage.GetBinary(), 0644)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error saving encrypted file: %v", err))
+	}
+	utils.Info("Encrypting backup using public key...done")
+	utils.Info("Backup file encrypted successful!")
+	return nil
+}
+// decryptWithGPGPrivateKey decrypts backup file using a private key and passphrase.
+// privateKey GPG private key
+// passphrase GPG passphrase
+func decryptWithGPGPrivateKey(inputFile, privateKey, passphrase string) error {
+	utils.Info("Encrypting backup using private key...")
+	// Read the private key
+	priKeyBytes, err := os.ReadFile(privateKey)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error reading private key: %s", err))
+	}
+	// Read the password for the private key (if its password-protected)
+	password := []byte(passphrase)
+	// Create a key object from the armored private key
+	privateKeyObj, err := crypto.NewKeyFromArmored(string(priKeyBytes))
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error parsing private key: %s", err))
+	}
+	// Unlock the private key with the password
+	if passphrase != "" {
+		// Unlock the private key with the password
+		_, err = privateKeyObj.Unlock(password)
+		if err != nil {
+			return errors.New(fmt.Sprintf("Error unlocking private key: %s", err))
+		}
+	}
+	// Create a new keyring with the private key
+	keyRing, err := crypto.NewKeyRing(privateKeyObj)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error creating key ring: %v", err))
+	}
+	// Read the encrypted file
+	encFileContent, err := os.ReadFile(inputFile)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error reading encrypted file: %s", err))
+	}
+	// decryptWithGPG the file
+	encryptedMessage := crypto.NewPGPMessage(encFileContent)
+	message, err := keyRing.Decrypt(encryptedMessage, nil, 0)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error decrypting file: %s", err))
+	}
+	// Save the decrypted file
+	err = os.WriteFile(RemoveLastExtension(inputFile), message.GetBinary(), 0644)
+	if err != nil {
+		return errors.New(fmt.Sprintf("Error saving decrypted file: %s", err))
+	}
+	utils.Info("Encrypting backup using public key...done")
+	fmt.Println("File successfully decrypted!")
+	return nil
+}
 func RemoveLastExtension(filename string) string {
 	if idx := strings.LastIndex(filename, "."); idx != -1 {
 		return filename[:idx]
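
As the updated docs note, an archive encrypted with a GPG public key has to be decrypted outside the container before it can be restored. A minimal sketch with stock GnuPG; the key and archive file names are placeholders taken from the documentation examples above:

```shell
# Import the private half of the key pair used for encryption
gpg --import private_key.asc

# Decrypt the archive; --output restores the original .sql.gz name
gpg --output database_20240730_044201.sql.gz \
    --decrypt database_20240730_044201.sql.gz.gpg
```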

View File

@@ -10,6 +10,7 @@ import (
 	"bytes"
 	"fmt"
 	"github.com/jkaninda/mysql-bkup/utils"
+	"gopkg.in/yaml.v3"
 	"os"
 	"os/exec"
 	"path/filepath"
@@ -129,3 +130,84 @@ func intro() {
 	utils.Info("Starting MySQL Backup...")
 	utils.Info("Copyright (c) 2024 Jonas Kaninda ")
 }
+func checkPubKeyFile(pubKey string) (string, error) {
+	// Define possible key file names
+	keyFiles := []string{filepath.Join(gpgHome, "public_key.asc"), filepath.Join(gpgHome, "public_key.gpg"), pubKey}
+	// Loop through key file names and check if they exist
+	for _, keyFile := range keyFiles {
+		if _, err := os.Stat(keyFile); err == nil {
+			// File exists
+			return keyFile, nil
+		} else if os.IsNotExist(err) {
+			// File does not exist, continue to the next one
+			continue
+		} else {
+			// An unexpected error occurred
+			return "", err
+		}
+	}
+	// Return an error if neither file exists
+	return "", fmt.Errorf("no public key file found")
+}
+func checkPrKeyFile(prKey string) (string, error) {
+	// Define possible key file names
+	keyFiles := []string{filepath.Join(gpgHome, "private_key.asc"), filepath.Join(gpgHome, "private_key.gpg"), prKey}
+	// Loop through key file names and check if they exist
+	for _, keyFile := range keyFiles {
+		if _, err := os.Stat(keyFile); err == nil {
+			// File exists
+			return keyFile, nil
+		} else if os.IsNotExist(err) {
+			// File does not exist, continue to the next one
+			continue
+		} else {
+			// An unexpected error occurred
+			return "", err
+		}
+	}
+	// Return an error if neither file exists
+	return "", fmt.Errorf("no public key file found")
+}
+func readConf(configFile string) (*Config, error) {
+	//configFile := filepath.Join("./", filename)
+	if utils.FileExists(configFile) {
+		buf, err := os.ReadFile(configFile)
+		if err != nil {
+			return nil, err
+		}
+		c := &Config{}
+		err = yaml.Unmarshal(buf, c)
+		if err != nil {
+			return nil, fmt.Errorf("in file %q: %w", configFile, err)
+		}
+		return c, err
+	}
+	return nil, fmt.Errorf("config file %q not found", configFile)
+}
+func checkConfigFile(filePath string) (string, error) {
+	// Define possible config file names
+	configFiles := []string{filepath.Join(workingDir, "config.yaml"), filepath.Join(workingDir, "config.yml"), filePath}
+	// Loop through config file names and check if they exist
+	for _, configFile := range configFiles {
+		if _, err := os.Stat(configFile); err == nil {
+			// File exists
+			return configFile, nil
+		} else if os.IsNotExist(err) {
+			// File does not exist, continue to the next one
+			continue
+		} else {
+			// An unexpected error occurred
+			return "", err
+		}
+	}
+	// Return an error if neither file exists
+	return "", fmt.Errorf("no config file found")
+}

View File

@@ -30,11 +30,13 @@ func StartMigration(cmd *cobra.Command) {
 	//Generate file name
 	backupFileName := fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
+	conf := &RestoreConfig{}
+	conf.file = backupFileName
 	//Backup source Database
 	BackupDatabase(dbConf, backupFileName, true)
 	//Restore source database into target database
 	utils.Info("Restoring [%s] database into [%s] database...", dbConf.dbName, targetDbConf.targetDbName)
-	RestoreDatabase(&newDbConfig, backupFileName)
+	RestoreDatabase(&newDbConfig, conf)
 	utils.Info("[%s] database has been restored into [%s] database", dbConf.dbName, targetDbConf.targetDbName)
 	utils.Info("Database migration completed.")
 }

View File

@@ -7,7 +7,6 @@
package pkg package pkg
import ( import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils" "github.com/jkaninda/mysql-bkup/utils"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"os" "os"
@@ -21,90 +20,91 @@ func StartRestore(cmd *cobra.Command) {
restoreConf := initRestoreConfig(cmd) restoreConf := initRestoreConfig(cmd)
switch restoreConf.storage { switch restoreConf.storage {
case "s3":
restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path)
case "local": case "local":
utils.Info("Restore database from local") utils.Info("Restore database from local")
copyToTmp(storagePath, restoreConf.file) copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, restoreConf.file) RestoreDatabase(dbConf, restoreConf)
case "ssh": case "s3", "S3":
restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath) restoreFromS3(dbConf, restoreConf)
case "ftp": case "ssh", "SSH", "remote":
restoreFromFTP(dbConf, restoreConf.file, restoreConf.remotePath) restoreFromRemote(dbConf, restoreConf)
case "ftp", "FTP":
restoreFromFTP(dbConf, restoreConf)
default: default:
utils.Info("Restore database from local") utils.Info("Restore database from local")
copyToTmp(storagePath, restoreConf.file) copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, restoreConf.file) RestoreDatabase(dbConf, restoreConf)
} }
} }
-func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
+func restoreFromS3(db *dbConfig, conf *RestoreConfig) {
     utils.Info("Restore database from s3")
-    err := DownloadFile(tmpPath, file, bucket, s3Path)
+    err := DownloadFile(tmpPath, conf.file, conf.bucket, conf.s3Path)
     if err != nil {
-        utils.Fatal("Error download file from s3 %s %v", file, err)
+        utils.Fatal("Error download file from s3 %s %v ", conf.file, err)
     }
-    RestoreDatabase(db, file)
+    RestoreDatabase(db, conf)
 }
-func restoreFromRemote(db *dbConfig, file, remotePath string) {
+func restoreFromRemote(db *dbConfig, conf *RestoreConfig) {
     utils.Info("Restore database from remote server")
-    err := CopyFromRemote(file, remotePath)
+    err := CopyFromRemote(conf.file, conf.remotePath)
     if err != nil {
-        utils.Fatal("Error download file from remote server: %s %v ", filepath.Join(remotePath, file), err)
+        utils.Fatal("Error download file from remote server: %s %v", filepath.Join(conf.remotePath, conf.file), err)
     }
-    RestoreDatabase(db, file)
+    RestoreDatabase(db, conf)
 }
-func restoreFromFTP(db *dbConfig, file, remotePath string) {
+func restoreFromFTP(db *dbConfig, conf *RestoreConfig) {
     utils.Info("Restore database from FTP server")
-    err := CopyFromFTP(file, remotePath)
+    err := CopyFromFTP(conf.file, conf.remotePath)
     if err != nil {
-        utils.Fatal("Error download file from FTP server: %s %v", filepath.Join(remotePath, file), err)
+        utils.Fatal("Error download file from FTP server: %s %v", filepath.Join(conf.remotePath, conf.file), err)
     }
-    RestoreDatabase(db, file)
+    RestoreDatabase(db, conf)
 }
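After this change, restoreFromS3, restoreFromRemote and restoreFromFTP all share the same shape: fetch the archive into tmpPath, then call RestoreDatabase with the same RestoreConfig. Purely as a hypothetical refactor sketch (not something this patch does), the fetch step could be passed in as a closure:

```go
package pkg

import "github.com/jkaninda/mysql-bkup/utils"

// restoreFrom is a hypothetical consolidation of the three helpers above; only
// the fetch step differs between them, so it is supplied by the caller.
func restoreFrom(db *dbConfig, conf *RestoreConfig, source string, fetch func() error) {
	utils.Info("Restore database from %s", source)
	if err := fetch(); err != nil {
		utils.Fatal("Error download file from %s: %s %v", source, conf.file, err)
	}
	RestoreDatabase(db, conf)
}
```

A caller would then write, for example, restoreFrom(db, conf, "FTP", func() error { return CopyFromFTP(conf.file, conf.remotePath) }).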
 // RestoreDatabase restore database
-func RestoreDatabase(db *dbConfig, file string) {
-    gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
-    if file == "" {
+func RestoreDatabase(db *dbConfig, conf *RestoreConfig) {
+    if conf.file == "" {
         utils.Fatal("Error, file required")
     }
-    extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
+    extension := filepath.Ext(filepath.Join(tmpPath, conf.file))
+    err := utils.CheckEnvVars(dbHVars)
+    if err != nil {
+        utils.Error("Please make sure all required environment variables for database are set")
+        utils.Fatal("Error checking environment variables: %s", err)
+    }
     if extension == ".gpg" {
-        if gpgPassphrase == "" {
-            utils.Fatal("Error: GPG passphrase is required, your file seems to be a GPG file.\nYou need to provide GPG keys. GPG_PASSPHRASE environment variable is required.")
-        } else {
-            //Decrypt file
-            err := Decrypt(filepath.Join(tmpPath, file), gpgPassphrase)
-            if err != nil {
-                utils.Fatal("Error decrypting file %s %v", file, err)
-            }
-            //Update file name
-            file = RemoveLastExtension(file)
-        }
+        if conf.usingKey {
+            utils.Warn("Backup decryption using a private key is not fully supported")
+            err := decryptWithGPGPrivateKey(filepath.Join(tmpPath, conf.file), conf.privateKey, conf.passphrase)
+            if err != nil {
+                utils.Fatal("error during decrypting backup %v", err)
+            }
+        } else {
+            if conf.passphrase == "" {
+                utils.Error("Error, passphrase or private key required")
+                utils.Fatal("Your file seems to be a GPG file.\nYou need to provide GPG keys. GPG_PASSPHRASE or GPG_PRIVATE_KEY environment variable is required.")
+            } else {
+                //decryptWithGPG file
+                err := decryptWithGPG(filepath.Join(tmpPath, conf.file), conf.passphrase)
+                if err != nil {
+                    utils.Fatal("Error decrypting file %s %v", file, err)
+                }
+                //Update file name
+                conf.file = RemoveLastExtension(file)
+            }
+        }
     }
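Both decryption branches strip the trailing .gpg from the file name through RemoveLastExtension, which is referenced here but not shown in this diff. A minimal sketch of what such a helper could look like, offered as an assumption rather than the repository's actual implementation:

```go
package pkg

import (
	"path/filepath"
	"strings"
)

// RemoveLastExtension (sketch): drop only the final extension, so
// "backup.sql.gz.gpg" becomes "backup.sql.gz".
func RemoveLastExtension(filename string) string {
	return strings.TrimSuffix(filename, filepath.Ext(filename))
}
```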
-    if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) {
-        err = os.Setenv("MYSQL_PWD", db.dbPassword)
+    if utils.FileExists(filepath.Join(tmpPath, conf.file)) {
+        err := os.Setenv("MYSQL_PWD", db.dbPassword)
         if err != nil {
             return
         }
         testDatabaseConnection(db)
         utils.Info("Restoring database...")
-        extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
+        extension := filepath.Ext(filepath.Join(tmpPath, conf.file))
         // Restore from compressed file / .sql.gz
         if extension == ".gz" {
-            str := "zcat " + filepath.Join(tmpPath, file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
+            str := "zcat " + filepath.Join(tmpPath, conf.file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
             _, err := exec.Command("sh", "-c", str).Output()
             if err != nil {
                 utils.Fatal("Error, in restoring the database %v", err)
@@ -116,7 +116,7 @@ func RestoreDatabase(db *dbConfig, file string) {
         } else if extension == ".sql" {
             //Restore from sql file
-            str := "cat " + filepath.Join(tmpPath, file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
+            str := "cat " + filepath.Join(tmpPath, conf.file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
             _, err := exec.Command("sh", "-c", str).Output()
             if err != nil {
                 utils.Fatal("Error in restoring the database %v", err)
@@ -130,6 +130,6 @@ func RestoreDatabase(db *dbConfig, file string) {
         }
     } else {
-        utils.Fatal("File not found in %s", filepath.Join(tmpPath, file))
+        utils.Fatal("File not found in %s", filepath.Join(tmpPath, conf.file))
     }
 }
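The passphrase branch now calls decryptWithGPG and the key branch decryptWithGPGPrivateKey, matching the move from the gpg binary to a Go OpenPGP dependency (the gnupg package is also dropped from the Dockerfile). Neither helper is shown in this diff. As a rough illustration of what symmetric, passphrase-based decryption can look like in pure Go, here is a hedged sketch built on golang.org/x/crypto/openpgp; the repository may well use a different OpenPGP library with a different signature.

```go
package pkg

import (
	"errors"
	"io"
	"os"
	"path/filepath"
	"strings"

	"golang.org/x/crypto/openpgp" // assumption: not necessarily the dependency the repo uses
)

// decryptSymmetric (sketch): decrypt file.gpg with a passphrase and write the
// plaintext next to it, with the trailing ".gpg" removed.
func decryptSymmetric(inputFile, passphrase string) error {
	in, err := os.Open(inputFile)
	if err != nil {
		return err
	}
	defer in.Close()

	asked := false
	prompt := func(keys []openpgp.Key, symmetric bool) ([]byte, error) {
		// openpgp re-prompts on a wrong passphrase; fail instead of looping forever.
		if asked {
			return nil, errors.New("invalid passphrase")
		}
		asked = true
		return []byte(passphrase), nil
	}

	md, err := openpgp.ReadMessage(in, nil, prompt, nil)
	if err != nil {
		return err
	}

	out, err := os.Create(strings.TrimSuffix(inputFile, filepath.Ext(inputFile)))
	if err != nil {
		return err
	}
	defer out.Close()

	_, err = io.Copy(out, md.UnverifiedBody) // decryption errors surface while reading the body
	return err
}
```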

View File

@@ -11,6 +11,7 @@ const tmpPath = "/tmp/backup"
 const algorithm = "aes256"
 const gpgHome = "/config/gnupg"
 const gpgExtension = "gpg"
+const workingDir = "/config"
 var (
     storage = "local"
@@ -18,6 +19,7 @@ var (
     storagePath = "/backup"
     disableCompression = false
     encryption = false
+    usingKey = false
 )
 // dbHVars Required environment variables for database
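The new package-level usingKey flag records whether GPG operations should use a key pair instead of a passphrase. How it gets set is not part of this diff; one plausible wiring, shown only as a hypothetical sketch based on the GPG_PRIVATE_KEY variable mentioned in the restore error message above, would be:

```go
package pkg

import "os"

// initEncryptionMode is a hypothetical sketch: flip usingKey when a private key
// is supplied via the environment. The real initialization lives elsewhere.
func initEncryptionMode() {
	if os.Getenv("GPG_PRIVATE_KEY") != "" {
		usingKey = true
	}
}
```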

View File

@@ -12,9 +12,8 @@ import (
     "time"
 )
-var currentTime = time.Now().Format("2006/01/02 15:04:05")
 func Info(msg string, args ...any) {
+    var currentTime = time.Now().Format("2006/01/02 15:04:05")
     formattedMessage := fmt.Sprintf(msg, args...)
     if len(args) == 0 {
         fmt.Printf("%s INFO: %s\n", currentTime, msg)
@@ -25,6 +24,7 @@ func Info(msg string, args ...any) {
 // Warn warning message
 func Warn(msg string, args ...any) {
+    var currentTime = time.Now().Format("2006/01/02 15:04:05")
     formattedMessage := fmt.Sprintf(msg, args...)
     if len(args) == 0 {
         fmt.Printf("%s WARN: %s\n", currentTime, msg)
@@ -33,6 +33,7 @@ func Warn(msg string, args ...any) {
     }
 }
 func Error(msg string, args ...any) {
+    var currentTime = time.Now().Format("2006/01/02 15:04:05")
     formattedMessage := fmt.Sprintf(msg, args...)
     if len(args) == 0 {
         fmt.Printf("%s ERROR: %s\n", currentTime, msg)
@@ -41,6 +42,7 @@ func Error(msg string, args ...any) {
     }
 }
 func Done(msg string, args ...any) {
+    var currentTime = time.Now().Format("2006/01/02 15:04:05")
     formattedMessage := fmt.Sprintf(msg, args...)
     if len(args) == 0 {
         fmt.Printf("%s INFO: %s\n", currentTime, msg)
@@ -51,6 +53,7 @@ func Done(msg string, args ...any) {
 // Fatal logs an error message and exits the program
 func Fatal(msg string, args ...any) {
+    var currentTime = time.Now().Format("2006/01/02 15:04:05")
     // Fatal logs an error message and exits the program.
     formattedMessage := fmt.Sprintf(msg, args...)
     if len(args) == 0 {
@@ -63,5 +66,4 @@ func Fatal(msg string, args ...any) {
     }
     os.Exit(1)
-    os.Kill.Signal()
 }
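This hunk carries the actual "fix: logging time" change: currentTime used to be formatted once at package initialization, so every log line carried the process start time; it is now computed inside each logging function. The unreachable os.Kill.Signal() call after os.Exit(1) is also dropped. A small standalone illustration of why the timestamp move matters (the layout string "2006/01/02 15:04:05" is Go's reference time):

```go
package main

import (
	"fmt"
	"time"
)

// Before the fix: formatted once, at package initialization.
var initTime = time.Now().Format("2006/01/02 15:04:05")

func main() {
	time.Sleep(2 * time.Second)
	fmt.Println("stamped at init :", initTime)                                  // stays at process start
	fmt.Println("stamped per call:", time.Now().Format("2006/01/02 15:04:05")) // reflects the current time
}
```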