mirror of
https://github.com/jkaninda/mysql-bkup.git
synced 2025-12-06 13:39:41 +01:00
Compare commits
31 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 3dce2017f8 | |||
|
|
ed2f1b8d9c | ||
| b64875df21 | |||
|
|
fc90507b3f | ||
| df0efd24d3 | |||
|
|
e5dd7e76ce | ||
| 12fbb67a09 | |||
|
|
df490af7b6 | ||
| d930c3e2f6 | |||
|
|
e4258cb12e | ||
| 4c44166921 | |||
| 554df819ab | |||
|
|
ca5633882e | ||
| c5cca82841 | |||
|
|
bbd5422089 | ||
|
|
d72156f890 | ||
|
|
909a50dbe7 | ||
|
|
94ceb71da2 | ||
|
|
fe05fe5110 | ||
| dabba2050a | |||
|
|
47e1ac407b | ||
| 28f6ed3a82 | |||
|
|
504926c7cd | ||
| 737f473f92 | |||
|
|
300d2a8205 | ||
|
|
a4ad0502cf | ||
| f344867edf | |||
|
|
d774584f64 | ||
| 96927cd57e | |||
|
|
ceacfa1d9d | ||
|
|
9380a18b45 |
6
.github/workflows/build.yml
vendored
6
.github/workflows/build.yml
vendored
@@ -25,8 +25,10 @@ jobs:
|
|||||||
uses: docker/build-push-action@v3
|
uses: docker/build-push-action@v3
|
||||||
with:
|
with:
|
||||||
push: true
|
push: true
|
||||||
file: "./docker/Dockerfile"
|
file: "./Dockerfile"
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||||
|
build-args: |
|
||||||
|
appVersion=develop-${{ github.sha }}
|
||||||
tags: |
|
tags: |
|
||||||
"${{env.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"
|
"${{vars.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"
|
||||||
|
|
||||||
|
|||||||
12
.github/workflows/release.yml
vendored
12
.github/workflows/release.yml
vendored
@@ -39,11 +39,13 @@ jobs:
|
|||||||
uses: docker/build-push-action@v3
|
uses: docker/build-push-action@v3
|
||||||
with:
|
with:
|
||||||
push: true
|
push: true
|
||||||
file: "./docker/Dockerfile"
|
file: "./Dockerfile"
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||||
|
build-args: |
|
||||||
|
appVersion=${{ env.TAG_NAME }}
|
||||||
tags: |
|
tags: |
|
||||||
"${{env.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
|
"${{vars.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
|
||||||
"${{env.BUILDKIT_IMAGE}}:latest"
|
"${{vars.BUILDKIT_IMAGE}}:latest"
|
||||||
"ghcr.io/${{env.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
|
"ghcr.io/${{vars.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
|
||||||
"ghcr.io/${{env.BUILDKIT_IMAGE}}:latest"
|
"ghcr.io/${{vars.BUILDKIT_IMAGE}}:latest"
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,15 @@ ENV AWS_SECRET_KEY=""
|
|||||||
ENV AWS_S3_PATH=""
|
ENV AWS_S3_PATH=""
|
||||||
ENV AWS_REGION="us-west-2"
|
ENV AWS_REGION="us-west-2"
|
||||||
ENV AWS_DISABLE_SSL="false"
|
ENV AWS_DISABLE_SSL="false"
|
||||||
|
ENV AWS_FORCE_PATH_STYLE="true"
|
||||||
ENV GPG_PASSPHRASE=""
|
ENV GPG_PASSPHRASE=""
|
||||||
ENV SSH_USER=""
|
ENV SSH_USER=""
|
||||||
ENV SSH_PASSWORD=""
|
ENV SSH_PASSWORD=""
|
||||||
ENV SSH_HOST_NAME=""
|
ENV SSH_HOST=""
|
||||||
ENV SSH_IDENTIFY_FILE=""
|
ENV SSH_IDENTIFY_FILE=""
|
||||||
ENV SSH_PORT=22
|
ENV SSH_PORT=22
|
||||||
ENV REMOTE_PATH=""
|
ENV REMOTE_PATH=""
|
||||||
ENV FTP_HOST_NAME=""
|
ENV FTP_HOST=""
|
||||||
ENV FTP_PORT=21
|
ENV FTP_PORT=21
|
||||||
ENV FTP_USER=""
|
ENV FTP_USER=""
|
||||||
ENV FTP_PASSWORD=""
|
ENV FTP_PASSWORD=""
|
||||||
@@ -39,7 +40,6 @@ ENV TARGET_DB_PORT=3306
|
|||||||
ENV TARGET_DB_NAME=""
|
ENV TARGET_DB_NAME=""
|
||||||
ENV TARGET_DB_USERNAME=""
|
ENV TARGET_DB_USERNAME=""
|
||||||
ENV TARGET_DB_PASSWORD=""
|
ENV TARGET_DB_PASSWORD=""
|
||||||
ENV VERSION="v1.2.12"
|
|
||||||
ENV BACKUP_CRON_EXPRESSION=""
|
ENV BACKUP_CRON_EXPRESSION=""
|
||||||
ENV TG_TOKEN=""
|
ENV TG_TOKEN=""
|
||||||
ENV TG_CHAT_ID=""
|
ENV TG_CHAT_ID=""
|
||||||
@@ -47,11 +47,16 @@ ENV TZ=UTC
|
|||||||
ARG WORKDIR="/config"
|
ARG WORKDIR="/config"
|
||||||
ARG BACKUPDIR="/backup"
|
ARG BACKUPDIR="/backup"
|
||||||
ARG BACKUP_TMP_DIR="/tmp/backup"
|
ARG BACKUP_TMP_DIR="/tmp/backup"
|
||||||
|
ARG TEMPLATES_DIR="/config/templates"
|
||||||
|
ARG appVersion="v1.2.12"
|
||||||
|
ENV VERSION=${appVersion}
|
||||||
LABEL author="Jonas Kaninda"
|
LABEL author="Jonas Kaninda"
|
||||||
|
LABEL version=${appVersion}
|
||||||
|
|
||||||
RUN apk --update add --no-cache mysql-client mariadb-connector-c gnupg tzdata
|
RUN apk --update add --no-cache mysql-client mariadb-connector-c tzdata
|
||||||
RUN mkdir $WORKDIR
|
RUN mkdir $WORKDIR
|
||||||
RUN mkdir $BACKUPDIR
|
RUN mkdir $BACKUPDIR
|
||||||
|
RUN mkdir $TEMPLATES_DIR
|
||||||
RUN mkdir -p $BACKUP_TMP_DIR
|
RUN mkdir -p $BACKUP_TMP_DIR
|
||||||
RUN chmod 777 $WORKDIR
|
RUN chmod 777 $WORKDIR
|
||||||
RUN chmod 777 $BACKUPDIR
|
RUN chmod 777 $BACKUPDIR
|
||||||
@@ -59,6 +64,7 @@ RUN chmod 777 $BACKUP_TMP_DIR
|
|||||||
RUN chmod 777 $WORKDIR
|
RUN chmod 777 $WORKDIR
|
||||||
|
|
||||||
COPY --from=build /app/mysql-bkup /usr/local/bin/mysql-bkup
|
COPY --from=build /app/mysql-bkup /usr/local/bin/mysql-bkup
|
||||||
|
COPY ./templates/* $TEMPLATES_DIR/
|
||||||
RUN chmod +x /usr/local/bin/mysql-bkup
|
RUN chmod +x /usr/local/bin/mysql-bkup
|
||||||
|
|
||||||
RUN ln -s /usr/local/bin/mysql-bkup /usr/local/bin/bkup
|
RUN ln -s /usr/local/bin/mysql-bkup /usr/local/bin/bkup
|
||||||
@@ -29,8 +29,6 @@ func init() {
|
|||||||
//Backup
|
//Backup
|
||||||
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
|
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
|
||||||
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
|
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
|
||||||
BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. | Deprecated")
|
|
||||||
BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated")
|
|
||||||
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
|
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
|
||||||
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
|
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
|
||||||
BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days")
|
BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days")
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ As described for SSH backup section, to change the storage of your backup and us
|
|||||||
You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `REMOTE_PATH` environment variable.
|
You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `REMOTE_PATH` environment variable.
|
||||||
|
|
||||||
{: .note }
|
{: .note }
|
||||||
These environment variables are required for SSH backup `FTP_HOST_NAME`, `FTP_USER`, `REMOTE_PATH`, `FTP_PORT` or `FTP_PASSWORD`.
|
These environment variables are required for SSH backup `FTP_HOST`, `FTP_USER`, `REMOTE_PATH`, `FTP_PORT` or `FTP_PASSWORD`.
|
||||||
|
|
||||||
```yml
|
```yml
|
||||||
services:
|
services:
|
||||||
@@ -30,7 +30,7 @@ services:
|
|||||||
- DB_USERNAME=username
|
- DB_USERNAME=username
|
||||||
- DB_PASSWORD=password
|
- DB_PASSWORD=password
|
||||||
## FTP config
|
## FTP config
|
||||||
- FTP_HOST_NAME="hostname"
|
- FTP_HOST="hostname"
|
||||||
- FTP_PORT=21
|
- FTP_PORT=21
|
||||||
- FTP_USER=user
|
- FTP_USER=user
|
||||||
- FTP_PASSWORD=password
|
- FTP_PASSWORD=password
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ As described for s3 backup section, to change the storage of your backup and use
|
|||||||
You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `REMOTE_PATH` environment variable.
|
You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `REMOTE_PATH` environment variable.
|
||||||
|
|
||||||
{: .note }
|
{: .note }
|
||||||
These environment variables are required for SSH backup `SSH_HOST_NAME`, `SSH_USER`, `SSH_REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT` or `SSH_PASSWORD` if you dont use a private key to access to your server.
|
These environment variables are required for SSH backup `SSH_HOST`, `SSH_USER`, `SSH_REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT` or `SSH_PASSWORD` if you dont use a private key to access to your server.
|
||||||
Accessing the remote server using password is not recommended, use private key instead.
|
Accessing the remote server using password is not recommended, use private key instead.
|
||||||
|
|
||||||
```yml
|
```yml
|
||||||
@@ -33,7 +33,7 @@ services:
|
|||||||
- DB_USERNAME=username
|
- DB_USERNAME=username
|
||||||
- DB_PASSWORD=password
|
- DB_PASSWORD=password
|
||||||
## SSH config
|
## SSH config
|
||||||
- SSH_HOST_NAME="hostname"
|
- SSH_HOST="hostname"
|
||||||
- SSH_PORT=22
|
- SSH_PORT=22
|
||||||
- SSH_USER=user
|
- SSH_USER=user
|
||||||
- REMOTE_PATH=/home/jkaninda/backups
|
- REMOTE_PATH=/home/jkaninda/backups
|
||||||
@@ -73,7 +73,7 @@ services:
|
|||||||
- DB_USERNAME=username
|
- DB_USERNAME=username
|
||||||
- DB_PASSWORD=password
|
- DB_PASSWORD=password
|
||||||
## SSH config
|
## SSH config
|
||||||
- SSH_HOST_NAME="hostname"
|
- SSH_HOST="hostname"
|
||||||
- SSH_PORT=22
|
- SSH_PORT=22
|
||||||
- SSH_USER=user
|
- SSH_USER=user
|
||||||
- REMOTE_PATH=/home/jkaninda/backups
|
- REMOTE_PATH=/home/jkaninda/backups
|
||||||
@@ -125,7 +125,7 @@ spec:
|
|||||||
# Please use secret!
|
# Please use secret!
|
||||||
- name: DB_PASSWORD
|
- name: DB_PASSWORD
|
||||||
value: ""
|
value: ""
|
||||||
- name: SSH_HOST_NAME
|
- name: SSH_HOST
|
||||||
value: ""
|
value: ""
|
||||||
- name: SSH_PORT
|
- name: SSH_PORT
|
||||||
value: "22"
|
value: "22"
|
||||||
|
|||||||
6
docs/how-tos/deprecated-configs.md
Normal file
6
docs/how-tos/deprecated-configs.md
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
---
|
||||||
|
title: Update deprecated configurations
|
||||||
|
layout: default
|
||||||
|
parent: How Tos
|
||||||
|
nav_order: 11
|
||||||
|
---
|
||||||
@@ -1,30 +1,39 @@
|
|||||||
---
|
---
|
||||||
title: Encrypt backups using GPG
|
title: Encrypt backups
|
||||||
layout: default
|
layout: default
|
||||||
parent: How Tos
|
parent: How Tos
|
||||||
nav_order: 8
|
nav_order: 8
|
||||||
---
|
---
|
||||||
# Encrypt backup
|
# Encrypt backup
|
||||||
|
|
||||||
The image supports encrypting backups using GPG out of the box. In case a `GPG_PASSPHRASE` environment variable is set, the backup archive will be encrypted using the given key and saved as a sql.gpg file instead or sql.gz.gpg.
|
The image supports encrypting backups using one of two available methods: GPG with passphrase or GPG with a public key.
|
||||||
|
|
||||||
|
|
||||||
|
The image supports encrypting backups using GPG out of the box. In case a `GPG_PASSPHRASE` or `GPG_PUBLIC_KEY` environment variable is set, the backup archive will be encrypted using the given key and saved as a sql.gpg file instead or sql.gz.gpg.
|
||||||
|
|
||||||
{: .warning }
|
{: .warning }
|
||||||
To restore an encrypted backup, you need to provide the same GPG passphrase or key used during backup process.
|
To restore an encrypted backup, you need to provide the same GPG passphrase used during backup process.
|
||||||
|
|
||||||
- GPG home directory `/config/gnupg`
|
- GPG home directory `/config/gnupg`
|
||||||
- Cipher algorithm `aes256`
|
- Cipher algorithm `aes256`
|
||||||
-
|
|
||||||
To decrypt manually, you need to install `gnupg`
|
|
||||||
|
|
||||||
### Decrypt backup
|
{: .note }
|
||||||
|
The backup encrypted using `GPG passphrase` method can be restored automatically, no need to decrypt it before restoration.
|
||||||
|
Suppose you used a GPG public key during the backup process. In that case, you need to decrypt your backup before restoration because decryption using a `GPG private` key is not fully supported.
|
||||||
|
|
||||||
|
To decrypt manually, you need to install `gnupg`
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
gpg --batch --passphrase "my-passphrase" \
|
gpg --batch --passphrase "my-passphrase" \
|
||||||
--output database_20240730_044201.sql.gz \
|
--output database_20240730_044201.sql.gz \
|
||||||
--decrypt database_20240730_044201.sql.gz.gpg
|
--decrypt database_20240730_044201.sql.gz.gpg
|
||||||
```
|
```
|
||||||
|
Using your private key
|
||||||
|
|
||||||
### Backup
|
```shell
|
||||||
|
gpg --output database_20240730_044201.sql.gz --decrypt database_20240730_044201.sql.gz.gpg
|
||||||
|
```
|
||||||
|
## Using GPG passphrase
|
||||||
|
|
||||||
```yml
|
```yml
|
||||||
services:
|
services:
|
||||||
@@ -51,4 +60,32 @@ services:
|
|||||||
- web
|
- web
|
||||||
networks:
|
networks:
|
||||||
web:
|
web:
|
||||||
|
```
|
||||||
|
## Using GPG Public Key
|
||||||
|
|
||||||
|
```yml
|
||||||
|
services:
|
||||||
|
mysql-bkup:
|
||||||
|
# In production, it is advised to lock your image tag to a proper
|
||||||
|
# release version instead of using `latest`.
|
||||||
|
# Check https://github.com/jkaninda/mysql-bkup/releases
|
||||||
|
# for a list of available releases.
|
||||||
|
image: jkaninda/mysql-bkup
|
||||||
|
container_name: mysql-bkup
|
||||||
|
command: backup -d database
|
||||||
|
volumes:
|
||||||
|
- ./backup:/backup
|
||||||
|
environment:
|
||||||
|
- DB_PORT=3306
|
||||||
|
- DB_HOST=mysql
|
||||||
|
- DB_NAME=database
|
||||||
|
- DB_USERNAME=username
|
||||||
|
- DB_PASSWORD=password
|
||||||
|
## Required to encrypt backup
|
||||||
|
- GPG_PUBLIC_KEY=/config/public_key.asc
|
||||||
|
# mysql-bkup container must be connected to the same network with your database
|
||||||
|
networks:
|
||||||
|
- web
|
||||||
|
networks:
|
||||||
|
web:
|
||||||
```
|
```
|
||||||
63
docs/how-tos/mutli-backup.md
Normal file
63
docs/how-tos/mutli-backup.md
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
---
|
||||||
|
title: Run multiple backup schedules in the same container
|
||||||
|
layout: default
|
||||||
|
parent: How Tos
|
||||||
|
nav_order: 11
|
||||||
|
---
|
||||||
|
|
||||||
|
Multiple backup schedules with different configuration can be configured by mounting a configuration file into `/config/config.yaml` `/config/config.yml` or by defining an environment variable `BACKUP_CONFIG_FILE=/backup/config.yaml`.
|
||||||
|
|
||||||
|
## Configuration file
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
#cronExpression: "@every 20m" //Optional for scheduled backups
|
||||||
|
cronExpression: ""
|
||||||
|
databases:
|
||||||
|
- host: mysql1
|
||||||
|
port: 3306
|
||||||
|
name: database1
|
||||||
|
user: database1
|
||||||
|
password: password
|
||||||
|
path: /s3-path/database1 #For SSH or FTP you need to define the full path (/home/toto/backup/)
|
||||||
|
- host: mysql2
|
||||||
|
port: 3306
|
||||||
|
name: lldap
|
||||||
|
user: lldap
|
||||||
|
password: password
|
||||||
|
path: /s3-path/lldap #For SSH or FTP you need to define the full path (/home/toto/backup/)
|
||||||
|
- host: mysql3
|
||||||
|
port: 3306
|
||||||
|
name: keycloak
|
||||||
|
user: keycloak
|
||||||
|
password: password
|
||||||
|
path: /s3-path/keycloak #For SSH or FTP you need to define the full path (/home/toto/backup/)
|
||||||
|
- host: mysql4
|
||||||
|
port: 3306
|
||||||
|
name: joplin
|
||||||
|
user: joplin
|
||||||
|
password: password
|
||||||
|
path: /s3-path/joplin #For SSH or FTP you need to define the full path (/home/toto/backup/)
|
||||||
|
```
|
||||||
|
## Docker compose file
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
mysql-bkup:
|
||||||
|
# In production, it is advised to lock your image tag to a proper
|
||||||
|
# release version instead of using `latest`.
|
||||||
|
# Check https://github.com/jkaninda/mysql-bkup/releases
|
||||||
|
# for a list of available releases.
|
||||||
|
image: jkaninda/mysql-bkup
|
||||||
|
container_name: mysql-bkup
|
||||||
|
command: backup
|
||||||
|
volumes:
|
||||||
|
- ./backup:/backup
|
||||||
|
environment:
|
||||||
|
## Multi backup config file
|
||||||
|
- BACKUP_CONFIG_FILE=/backup/config.yaml
|
||||||
|
# mysql-bkup container must be connected to the same network with your database
|
||||||
|
networks:
|
||||||
|
- web
|
||||||
|
networks:
|
||||||
|
web:
|
||||||
|
```
|
||||||
170
docs/how-tos/receive-notification.md
Normal file
170
docs/how-tos/receive-notification.md
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
---
|
||||||
|
title: Receive notifications
|
||||||
|
layout: default
|
||||||
|
parent: How Tos
|
||||||
|
nav_order: 12
|
||||||
|
---
|
||||||
|
Send Email or Telegram notifications on successfully or failed backup.
|
||||||
|
|
||||||
|
### Email
|
||||||
|
To send out email notifications on failed or successfully backup runs, provide SMTP credentials, a sender and a recipient:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
mysql-bkup:
|
||||||
|
image: jkaninda/mysql-bkup
|
||||||
|
container_name: mysql-bkup
|
||||||
|
command: backup
|
||||||
|
volumes:
|
||||||
|
- ./backup:/backup
|
||||||
|
environment:
|
||||||
|
- DB_PORT=3306
|
||||||
|
- DB_HOST=mysql
|
||||||
|
- DB_NAME=database
|
||||||
|
- DB_USERNAME=username
|
||||||
|
- DB_PASSWORD=password
|
||||||
|
- MAIL_HOST=
|
||||||
|
- MAIL_PORT=587
|
||||||
|
- MAIL_USERNAME=
|
||||||
|
- MAIL_PASSWORD=!
|
||||||
|
- MAIL_FROM=
|
||||||
|
- MAIL_TO=me@example.com,team@example.com,manager@example.com
|
||||||
|
- MAIL_SKIP_TLS=false
|
||||||
|
## Time format for notification
|
||||||
|
- TIME_FORMAT=2006-01-02 at 15:04:05
|
||||||
|
## Backup reference, in case you want to identify every backup instance
|
||||||
|
- BACKUP_REFERENCE=database/Paris cluster
|
||||||
|
networks:
|
||||||
|
- web
|
||||||
|
networks:
|
||||||
|
web:
|
||||||
|
```
|
||||||
|
|
||||||
|
### Telegram
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
mysql-bkup:
|
||||||
|
image: jkaninda/mysql-bkup
|
||||||
|
container_name: mysql-bkup
|
||||||
|
command: backup
|
||||||
|
volumes:
|
||||||
|
- ./backup:/backup
|
||||||
|
environment:
|
||||||
|
- DB_PORT=3306
|
||||||
|
- DB_HOST=mysql
|
||||||
|
- DB_NAME=database
|
||||||
|
- DB_USERNAME=username
|
||||||
|
- DB_PASSWORD=password
|
||||||
|
- TG_TOKEN=[BOT ID]:[BOT TOKEN]
|
||||||
|
- TG_CHAT_ID=
|
||||||
|
## Time format for notification
|
||||||
|
- TIME_FORMAT=2006-01-02 at 15:04:05
|
||||||
|
## Backup reference, in case you want to identify every backup instance
|
||||||
|
- BACKUP_REFERENCE=database/Paris cluster
|
||||||
|
networks:
|
||||||
|
- web
|
||||||
|
networks:
|
||||||
|
web:
|
||||||
|
```
|
||||||
|
|
||||||
|
### Customize notifications
|
||||||
|
|
||||||
|
The title and body of the notifications can be tailored to your needs using Go templates.
|
||||||
|
Template sources must be mounted inside the container in /config/templates:
|
||||||
|
|
||||||
|
- email.template: Email notification template
|
||||||
|
- telegram.template: Telegram notification template
|
||||||
|
- email-error.template: Error notification template
|
||||||
|
- telegram-error.template: Error notification template
|
||||||
|
|
||||||
|
### Data
|
||||||
|
|
||||||
|
Here is a list of all data passed to the template:
|
||||||
|
- `Database` : Database name
|
||||||
|
- `StartTime`: Backup start time process
|
||||||
|
- `EndTime`: Backup start time process
|
||||||
|
- `Storage`: Backup storage
|
||||||
|
- `BackupLocation`: Backup location
|
||||||
|
- `BackupSize`: Backup size
|
||||||
|
- `BackupReference`: Backup reference(eg: database/cluster name or server name)
|
||||||
|
|
||||||
|
> email.template:
|
||||||
|
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>✅ Database Backup Notification – {{.Database}}</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h2>Hi,</h2>
|
||||||
|
<p>Backup of the {{.Database}} database has been successfully completed on {{.EndTime}}.</p>
|
||||||
|
<h3>Backup Details:</h3>
|
||||||
|
<ul>
|
||||||
|
<li>Database Name: {{.Database}}</li>
|
||||||
|
<li>Backup Start Time: {{.StartTime}}</li>
|
||||||
|
<li>Backup End Time: {{.EndTime}}</li>
|
||||||
|
<li>Backup Storage: {{.Storage}}</li>
|
||||||
|
<li>Backup Location: {{.BackupLocation}}</li>
|
||||||
|
<li>Backup Size: {{.BackupSize}} bytes</li>
|
||||||
|
<li>Backup Reference: {{.BackupReference}} </li>
|
||||||
|
</ul>
|
||||||
|
<p>Best regards,</p>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
```
|
||||||
|
|
||||||
|
> telegram.template
|
||||||
|
|
||||||
|
```html
|
||||||
|
✅ Database Backup Notification – {{.Database}}
|
||||||
|
Hi,
|
||||||
|
Backup of the {{.Database}} database has been successfully completed on {{.EndTime}}.
|
||||||
|
|
||||||
|
Backup Details:
|
||||||
|
- Database Name: {{.Database}}
|
||||||
|
- Backup Start Time: {{.StartTime}}
|
||||||
|
- Backup EndTime: {{.EndTime}}
|
||||||
|
- Backup Storage: {{.Storage}}
|
||||||
|
- Backup Location: {{.BackupLocation}}
|
||||||
|
- Backup Size: {{.BackupSize}} bytes
|
||||||
|
- Backup Reference: {{.BackupReference}}
|
||||||
|
```
|
||||||
|
|
||||||
|
> email-error.template
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>🔴 Urgent: Database Backup Failure Notification</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h2>Hi,</h2>
|
||||||
|
<p>An error occurred during database backup.</p>
|
||||||
|
<h3>Failure Details:</h3>
|
||||||
|
<ul>
|
||||||
|
<li>Error Message: {{.Error}}</li>
|
||||||
|
<li>Date: {{.EndTime}}</li>
|
||||||
|
<li>Backup Reference: {{.BackupReference}} </li>
|
||||||
|
</ul>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
```
|
||||||
|
|
||||||
|
> telegram-error.template
|
||||||
|
|
||||||
|
|
||||||
|
```html
|
||||||
|
🔴 Urgent: Database Backup Failure Notification
|
||||||
|
|
||||||
|
An error occurred during database backup.
|
||||||
|
Failure Details:
|
||||||
|
|
||||||
|
Error Message: {{.Error}}
|
||||||
|
Date: {{.EndTime}}
|
||||||
|
```
|
||||||
@@ -34,40 +34,42 @@ Backup, restore and migrate targets, schedule and retention are configured using
|
|||||||
|
|
||||||
## Environment variables
|
## Environment variables
|
||||||
|
|
||||||
| Name | Requirement | Description |
|
| Name | Requirement | Description |
|
||||||
|------------------------|---------------------------------------------------------------|------------------------------------------------------|
|
|------------------------|---------------------------------------------------------------|-----------------------------------------------------------------|
|
||||||
| DB_PORT | Optional, default 3306 | Database port number |
|
| DB_PORT | Optional, default 3306 | Database port number |
|
||||||
| DB_HOST | Required | Database host |
|
| DB_HOST | Required | Database host |
|
||||||
| DB_NAME | Optional if it was provided from the -d flag | Database name |
|
| DB_NAME | Optional if it was provided from the -d flag | Database name |
|
||||||
| DB_USERNAME | Required | Database user name |
|
| DB_USERNAME | Required | Database user name |
|
||||||
| DB_PASSWORD | Required | Database password |
|
| DB_PASSWORD | Required | Database password |
|
||||||
| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
|
| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
|
||||||
| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
|
| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
|
||||||
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
|
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
|
||||||
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
|
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
|
||||||
| AWS_REGION | Optional, required for S3 storage | AWS Region |
|
| AWS_REGION | Optional, required for S3 storage | AWS Region |
|
||||||
| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
|
| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
|
||||||
| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
|
| AWS_FORCE_PATH_STYLE | Optional, required for S3 storage | Force path style |
|
||||||
| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
|
| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
|
||||||
| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode |
|
| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
|
||||||
| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip |
|
| GPG_PUBLIC_KEY | Optional, required to encrypt backup | GPG public key, used to encrypt backup (/config/public_key.asc) |
|
||||||
| SSH_USER | Optional, required for SSH storage | ssh remote user |
|
| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode |
|
||||||
| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
|
| SSH_HOST | Optional, required for SSH storage | ssh remote hostname or ip |
|
||||||
| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
|
| SSH_USER | Optional, required for SSH storage | ssh remote user |
|
||||||
| SSH_PORT | Optional, required for SSH storage | ssh remote server port |
|
| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
|
||||||
| REMOTE_PATH | Optional, required for SSH or FTP storage | remote path (/home/toto/backup) |
|
| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
|
||||||
| FTP_HOST_NAME | Optional, required for FTP storage | FTP host name |
|
| SSH_PORT | Optional, required for SSH storage | ssh remote server port |
|
||||||
| FTP_PORT | Optional, required for FTP storage | FTP server port number |
|
| REMOTE_PATH | Optional, required for SSH or FTP storage | remote path (/home/toto/backup) |
|
||||||
| FTP_USER | Optional, required for FTP storage | FTP user |
|
| FTP_HOST | Optional, required for FTP storage | FTP host name |
|
||||||
| FTP_PASSWORD | Optional, required for FTP storage | FTP user password |
|
| FTP_PORT | Optional, required for FTP storage | FTP server port number |
|
||||||
| TARGET_DB_HOST | Optional, required for database migration | Target database host |
|
| FTP_USER | Optional, required for FTP storage | FTP user |
|
||||||
| TARGET_DB_PORT | Optional, required for database migration | Target database port |
|
| FTP_PASSWORD | Optional, required for FTP storage | FTP user password |
|
||||||
| TARGET_DB_NAME | Optional, required for database migration | Target database name |
|
| TARGET_DB_HOST | Optional, required for database migration | Target database host |
|
||||||
| TARGET_DB_USERNAME | Optional, required for database migration | Target database username |
|
| TARGET_DB_PORT | Optional, required for database migration | Target database port |
|
||||||
| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password |
|
| TARGET_DB_NAME | Optional, required for database migration | Target database name |
|
||||||
| TG_TOKEN | Optional, required for Telegram notification | Telegram token (`BOT-ID:BOT-TOKEN`) |
|
| TARGET_DB_USERNAME | Optional, required for database migration | Target database username |
|
||||||
| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID |
|
| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password |
|
||||||
| TZ | Optional | Time Zone |
|
| TG_TOKEN | Optional, required for Telegram notification | Telegram token (`BOT-ID:BOT-TOKEN`) |
|
||||||
|
| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID |
|
||||||
|
| TZ | Optional | Time Zone |
|
||||||
|
|
||||||
---
|
---
|
||||||
## Run in Scheduled mode
|
## Run in Scheduled mode
|
||||||
|
|||||||
13
go.mod
13
go.mod
@@ -5,21 +5,30 @@ go 1.22.5
|
|||||||
require github.com/spf13/pflag v1.0.5
|
require github.com/spf13/pflag v1.0.5
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/ProtonMail/gopenpgp/v2 v2.7.5
|
||||||
github.com/aws/aws-sdk-go v1.55.3
|
github.com/aws/aws-sdk-go v1.55.3
|
||||||
github.com/bramvdbogaerde/go-scp v1.5.0
|
github.com/bramvdbogaerde/go-scp v1.5.0
|
||||||
github.com/hpcloud/tail v1.0.0
|
github.com/hpcloud/tail v1.0.0
|
||||||
|
github.com/jlaffaye/ftp v0.2.0
|
||||||
|
github.com/robfig/cron/v3 v3.0.1
|
||||||
github.com/spf13/cobra v1.8.0
|
github.com/spf13/cobra v1.8.0
|
||||||
golang.org/x/crypto v0.18.0
|
golang.org/x/crypto v0.18.0
|
||||||
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 // indirect
|
||||||
|
github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect
|
||||||
|
github.com/cloudflare/circl v1.3.3 // indirect
|
||||||
|
github.com/go-mail/mail v2.3.1+incompatible // indirect
|
||||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jlaffaye/ftp v0.2.0 // indirect
|
|
||||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
golang.org/x/sys v0.22.0 // indirect
|
golang.org/x/sys v0.22.0 // indirect
|
||||||
|
golang.org/x/text v0.14.0 // indirect
|
||||||
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
||||||
gopkg.in/fsnotify.v1 v1.4.7 // indirect
|
gopkg.in/fsnotify.v1 v1.4.7 // indirect
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
62
go.sum
62
go.sum
@@ -1,12 +1,24 @@
|
|||||||
|
github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs=
|
||||||
|
github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
|
||||||
|
github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k=
|
||||||
|
github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw=
|
||||||
|
github.com/ProtonMail/gopenpgp/v2 v2.7.5 h1:STOY3vgES59gNgoOt2w0nyHBjKViB/qSg7NjbQWPJkA=
|
||||||
|
github.com/ProtonMail/gopenpgp/v2 v2.7.5/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g=
|
||||||
github.com/aws/aws-sdk-go v1.55.3 h1:0B5hOX+mIx7I5XPOrjrHlKSDQV/+ypFZpIHOx5LOk3E=
|
github.com/aws/aws-sdk-go v1.55.3 h1:0B5hOX+mIx7I5XPOrjrHlKSDQV/+ypFZpIHOx5LOk3E=
|
||||||
github.com/aws/aws-sdk-go v1.55.3/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
|
github.com/aws/aws-sdk-go v1.55.3/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
|
||||||
github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU=
|
github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU=
|
||||||
github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
|
github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
|
||||||
github.com/bramvdbogaerde/go-scp v1.5.0 h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM=
|
github.com/bramvdbogaerde/go-scp v1.5.0 h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM=
|
||||||
github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ=
|
github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ=
|
||||||
|
github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
|
||||||
|
github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs=
|
||||||
|
github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/go-mail/mail v2.3.1+incompatible h1:UzNOn0k5lpfVtO31cK3hn6I4VEVGhe3lX8AJBAxXExM=
|
||||||
|
github.com/go-mail/mail v2.3.1+incompatible/go.mod h1:VPWjmmNyRsWXQZHVHT3g0YbIINUkSmuKOiLIDkWbL6M=
|
||||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||||
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||||
@@ -21,6 +33,8 @@ github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uT
|
|||||||
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
|
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
|
||||||
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
||||||
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
|
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||||
@@ -32,16 +46,64 @@ github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3k
|
|||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
|
golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||||
|
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
||||||
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
||||||
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
|
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
|
||||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
|
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
|
||||||
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
|
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||||
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
|
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||||
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
|
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
|
||||||
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
|
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||||
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
|
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
|
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
|
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
|
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||||
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk=
|
||||||
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|||||||
223
pkg/backup.go
223
pkg/backup.go
@@ -20,18 +20,23 @@ import (
|
|||||||
|
|
||||||
func StartBackup(cmd *cobra.Command) {
|
func StartBackup(cmd *cobra.Command) {
|
||||||
intro()
|
intro()
|
||||||
dbConf = initDbConfig(cmd)
|
|
||||||
//Initialize backup configs
|
//Initialize backup configs
|
||||||
config := initBackupConfig(cmd)
|
config := initBackupConfig(cmd)
|
||||||
|
//Load backup configuration file
|
||||||
if config.cronExpression == "" {
|
configFile, err := loadConfigFile()
|
||||||
BackupTask(dbConf, config)
|
if err != nil {
|
||||||
} else {
|
dbConf = initDbConfig(cmd)
|
||||||
if utils.IsValidCronExpression(config.cronExpression) {
|
if config.cronExpression == "" {
|
||||||
scheduledMode(dbConf, config)
|
BackupTask(dbConf, config)
|
||||||
} else {
|
} else {
|
||||||
utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
|
if utils.IsValidCronExpression(config.cronExpression) {
|
||||||
|
scheduledMode(dbConf, config)
|
||||||
|
} else {
|
||||||
|
utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
startMultiBackup(config, configFile)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
@@ -42,8 +47,6 @@ func scheduledMode(db *dbConfig, config *BackupConfig) {
|
|||||||
utils.Info("Backup cron expression: %s", config.cronExpression)
|
utils.Info("Backup cron expression: %s", config.cronExpression)
|
||||||
utils.Info("Storage type %s ", config.storage)
|
utils.Info("Storage type %s ", config.storage)
|
||||||
|
|
||||||
//Test database connexion
|
|
||||||
testDatabaseConnection(db)
|
|
||||||
//Test backup
|
//Test backup
|
||||||
utils.Info("Testing backup configurations...")
|
utils.Info("Testing backup configurations...")
|
||||||
BackupTask(db, config)
|
BackupTask(db, config)
|
||||||
@@ -66,7 +69,8 @@ func scheduledMode(db *dbConfig, config *BackupConfig) {
|
|||||||
select {}
|
select {}
|
||||||
}
|
}
|
||||||
func BackupTask(db *dbConfig, config *BackupConfig) {
|
func BackupTask(db *dbConfig, config *BackupConfig) {
|
||||||
//Generate backup file name
|
utils.Info("Starting backup task...")
|
||||||
|
//Generate file name
|
||||||
backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20060102_150405"))
|
backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20060102_150405"))
|
||||||
if config.disableCompression {
|
if config.disableCompression {
|
||||||
backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405"))
|
backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405"))
|
||||||
@@ -75,37 +79,92 @@ func BackupTask(db *dbConfig, config *BackupConfig) {
|
|||||||
switch config.storage {
|
switch config.storage {
|
||||||
case "local":
|
case "local":
|
||||||
localBackup(db, config)
|
localBackup(db, config)
|
||||||
case "s3":
|
case "s3", "S3":
|
||||||
s3Backup(db, config)
|
s3Backup(db, config)
|
||||||
case "ssh", "remote":
|
case "ssh", "SSH", "remote":
|
||||||
sshBackup(db, config)
|
sshBackup(db, config)
|
||||||
case "ftp":
|
case "ftp", "FTP":
|
||||||
ftpBackup(db, config)
|
ftpBackup(db, config)
|
||||||
|
//utils.Fatal("Not supported storage type: %s", config.storage)
|
||||||
default:
|
default:
|
||||||
localBackup(db, config)
|
localBackup(db, config)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
func multiBackupTask(databases []Database, bkConfig *BackupConfig) {
|
||||||
|
for _, db := range databases {
|
||||||
|
//Check if path is defined in config file
|
||||||
|
if db.Path != "" {
|
||||||
|
bkConfig.remotePath = db.Path
|
||||||
|
}
|
||||||
|
BackupTask(getDatabase(db), bkConfig)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func startMultiBackup(bkConfig *BackupConfig, configFile string) {
|
||||||
|
utils.Info("Starting multiple backup jobs...")
|
||||||
|
var conf = &Config{}
|
||||||
|
conf, err := readConf(configFile)
|
||||||
|
if err != nil {
|
||||||
|
utils.Fatal("Error reading config file: %s", err)
|
||||||
|
}
|
||||||
|
//Check if cronExpression is defined in config file
|
||||||
|
if conf.CronExpression != "" {
|
||||||
|
bkConfig.cronExpression = conf.CronExpression
|
||||||
|
}
|
||||||
|
// Check if cronExpression is defined
|
||||||
|
if bkConfig.cronExpression == "" {
|
||||||
|
multiBackupTask(conf.Databases, bkConfig)
|
||||||
|
} else {
|
||||||
|
// Check if cronExpression is valid
|
||||||
|
if utils.IsValidCronExpression(bkConfig.cronExpression) {
|
||||||
|
utils.Info("Running MultiBackup in Scheduled mode")
|
||||||
|
utils.Info("Backup cron expression: %s", bkConfig.cronExpression)
|
||||||
|
utils.Info("Storage type %s ", bkConfig.storage)
|
||||||
|
|
||||||
|
//Test backup
|
||||||
|
utils.Info("Testing backup configurations...")
|
||||||
|
multiBackupTask(conf.Databases, bkConfig)
|
||||||
|
utils.Info("Testing backup configurations...done")
|
||||||
|
utils.Info("Creating multi backup job...")
|
||||||
|
// Create a new cron instance
|
||||||
|
c := cron.New()
|
||||||
|
|
||||||
|
_, err := c.AddFunc(bkConfig.cronExpression, func() {
|
||||||
|
// Create a channel
|
||||||
|
multiBackupTask(conf.Databases, bkConfig)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Start the cron scheduler
|
||||||
|
c.Start()
|
||||||
|
utils.Info("Creating multi backup job...done")
|
||||||
|
utils.Info("Backup job started")
|
||||||
|
defer c.Stop()
|
||||||
|
select {}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
utils.Fatal("Cron expression is not valid: %s", bkConfig.cronExpression)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
// BackupDatabase backup database
|
// BackupDatabase backup database
|
||||||
func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) {
|
func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) {
|
||||||
|
|
||||||
storagePath = os.Getenv("STORAGE_PATH")
|
storagePath = os.Getenv("STORAGE_PATH")
|
||||||
|
|
||||||
err := utils.CheckEnvVars(dbHVars)
|
|
||||||
if err != nil {
|
|
||||||
utils.Error("Please make sure all required environment variables for database are set")
|
|
||||||
utils.Fatal("Error checking environment variables: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.Info("Starting database backup...")
|
utils.Info("Starting database backup...")
|
||||||
err = os.Setenv("MYSQL_PWD", db.dbPassword)
|
|
||||||
|
err := os.Setenv("MYSQL_PWD", db.dbPassword)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
testDatabaseConnection(db)
|
testDatabaseConnection(db)
|
||||||
|
|
||||||
// Backup Database database
|
// Backup Database database
|
||||||
utils.Info("Backing up database...")
|
utils.Info("Backing up database...")
|
||||||
|
|
||||||
|
// Verify is compression is disabled
|
||||||
if disableCompression {
|
if disableCompression {
|
||||||
// Execute mysqldump
|
// Execute mysqldump
|
||||||
cmd := exec.Command("mysqldump",
|
cmd := exec.Command("mysqldump",
|
||||||
@@ -120,7 +179,7 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
|
|||||||
}
|
}
|
||||||
|
|
||||||
// save output
|
// save output
|
||||||
file, err := os.Create(fmt.Sprintf("%s/%s", tmpPath, backupFileName))
|
file, err := os.Create(filepath.Join(tmpPath, backupFileName))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
@@ -141,7 +200,7 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
|
|||||||
}
|
}
|
||||||
gzipCmd := exec.Command("gzip")
|
gzipCmd := exec.Command("gzip")
|
||||||
gzipCmd.Stdin = stdout
|
gzipCmd.Stdin = stdout
|
||||||
gzipCmd.Stdout, err = os.Create(fmt.Sprintf("%s/%s", tmpPath, backupFileName))
|
gzipCmd.Stdout, err = os.Create(filepath.Join(tmpPath, backupFileName))
|
||||||
gzipCmd.Start()
|
gzipCmd.Start()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
@@ -159,43 +218,70 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
|
|||||||
}
|
}
|
||||||
func localBackup(db *dbConfig, config *BackupConfig) {
|
func localBackup(db *dbConfig, config *BackupConfig) {
|
||||||
utils.Info("Backup database to local storage")
|
utils.Info("Backup database to local storage")
|
||||||
|
startTime = time.Now().Format(utils.TimeFormat())
|
||||||
BackupDatabase(db, config.backupFileName, disableCompression)
|
BackupDatabase(db, config.backupFileName, disableCompression)
|
||||||
finalFileName := config.backupFileName
|
finalFileName := config.backupFileName
|
||||||
if config.encryption {
|
if config.encryption {
|
||||||
encryptBackup(config.backupFileName, config.passphrase)
|
encryptBackup(config)
|
||||||
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, gpgExtension)
|
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, gpgExtension)
|
||||||
}
|
}
|
||||||
|
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
|
||||||
|
if err != nil {
|
||||||
|
utils.Error("Error:", err)
|
||||||
|
}
|
||||||
|
//Get backup info
|
||||||
|
backupSize = fileInfo.Size()
|
||||||
utils.Info("Backup name is %s", finalFileName)
|
utils.Info("Backup name is %s", finalFileName)
|
||||||
moveToBackup(finalFileName, storagePath)
|
moveToBackup(finalFileName, storagePath)
|
||||||
//Send notification
|
//Send notification
|
||||||
utils.NotifySuccess(finalFileName)
|
utils.NotifySuccess(&utils.NotificationData{
|
||||||
|
File: finalFileName,
|
||||||
|
BackupSize: backupSize,
|
||||||
|
Database: db.dbName,
|
||||||
|
Storage: config.storage,
|
||||||
|
BackupLocation: filepath.Join(config.remotePath, finalFileName),
|
||||||
|
StartTime: startTime,
|
||||||
|
EndTime: time.Now().Format(utils.TimeFormat()),
|
||||||
|
})
|
||||||
//Delete old backup
|
//Delete old backup
|
||||||
if config.prune {
|
if config.prune {
|
||||||
deleteOldBackup(config.backupRetention)
|
deleteOldBackup(config.backupRetention)
|
||||||
}
|
}
|
||||||
//Delete temp
|
//Delete temp
|
||||||
deleteTemp()
|
deleteTemp()
|
||||||
|
utils.Info("Backup completed successfully")
|
||||||
}
|
}
|
||||||
|
|
||||||
func s3Backup(db *dbConfig, config *BackupConfig) {
|
func s3Backup(db *dbConfig, config *BackupConfig) {
|
||||||
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
|
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
|
||||||
s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
|
s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
|
||||||
|
if config.remotePath != "" {
|
||||||
|
s3Path = config.remotePath
|
||||||
|
}
|
||||||
utils.Info("Backup database to s3 storage")
|
utils.Info("Backup database to s3 storage")
|
||||||
|
startTime = time.Now().Format(utils.TimeFormat())
|
||||||
|
|
||||||
//Backup database
|
//Backup database
|
||||||
BackupDatabase(db, config.backupFileName, disableCompression)
|
BackupDatabase(db, config.backupFileName, disableCompression)
|
||||||
finalFileName := config.backupFileName
|
finalFileName := config.backupFileName
|
||||||
if config.encryption {
|
if config.encryption {
|
||||||
encryptBackup(config.backupFileName, config.passphrase)
|
encryptBackup(config)
|
||||||
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
|
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
|
||||||
}
|
}
|
||||||
utils.Info("Uploading backup archive to remote storage S3 ... ")
|
utils.Info("Uploading backup archive to remote storage S3 ... ")
|
||||||
|
|
||||||
utils.Info("Backup name is %s", finalFileName)
|
utils.Info("Backup name is %s", finalFileName)
|
||||||
err := UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
|
err := UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error uploading file to S3: %s ", err)
|
utils.Fatal("Error uploading backup archive to S3: %s ", err)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
//Get backup info
|
||||||
|
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
|
||||||
|
if err != nil {
|
||||||
|
utils.Error("Error:", err)
|
||||||
|
}
|
||||||
|
backupSize = fileInfo.Size()
|
||||||
//Delete backup file from tmp folder
|
//Delete backup file from tmp folder
|
||||||
err = utils.DeleteFile(filepath.Join(tmpPath, config.backupFileName))
|
err = utils.DeleteFile(filepath.Join(tmpPath, config.backupFileName))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -211,19 +297,29 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
|
|||||||
}
|
}
|
||||||
utils.Done("Uploading backup archive to remote storage S3 ... done ")
|
utils.Done("Uploading backup archive to remote storage S3 ... done ")
|
||||||
//Send notification
|
//Send notification
|
||||||
utils.NotifySuccess(finalFileName)
|
utils.NotifySuccess(&utils.NotificationData{
|
||||||
|
File: finalFileName,
|
||||||
|
BackupSize: backupSize,
|
||||||
|
Database: db.dbName,
|
||||||
|
Storage: config.storage,
|
||||||
|
BackupLocation: filepath.Join(config.remotePath, finalFileName),
|
||||||
|
StartTime: startTime,
|
||||||
|
EndTime: time.Now().Format(utils.TimeFormat()),
|
||||||
|
})
|
||||||
//Delete temp
|
//Delete temp
|
||||||
deleteTemp()
|
deleteTemp()
|
||||||
}
|
utils.Info("Backup completed successfully")
|
||||||
|
|
||||||
// sshBackup backup database to SSH remote server
|
}
|
||||||
func sshBackup(db *dbConfig, config *BackupConfig) {
|
func sshBackup(db *dbConfig, config *BackupConfig) {
|
||||||
utils.Info("Backup database to Remote server")
|
utils.Info("Backup database to Remote server")
|
||||||
|
startTime = time.Now().Format(utils.TimeFormat())
|
||||||
|
|
||||||
//Backup database
|
//Backup database
|
||||||
BackupDatabase(db, config.backupFileName, disableCompression)
|
BackupDatabase(db, config.backupFileName, disableCompression)
|
||||||
finalFileName := config.backupFileName
|
finalFileName := config.backupFileName
|
||||||
if config.encryption {
|
if config.encryption {
|
||||||
encryptBackup(config.backupFileName, config.passphrase)
|
encryptBackup(config)
|
||||||
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
|
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
|
||||||
}
|
}
|
||||||
utils.Info("Uploading backup archive to remote storage ... ")
|
utils.Info("Uploading backup archive to remote storage ... ")
|
||||||
@@ -233,11 +329,16 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
|
|||||||
utils.Fatal("Error uploading file to the remote server: %s ", err)
|
utils.Fatal("Error uploading file to the remote server: %s ", err)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
//Get backup info
|
||||||
|
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
|
||||||
|
if err != nil {
|
||||||
|
utils.Error("Error:", err)
|
||||||
|
}
|
||||||
|
backupSize = fileInfo.Size()
|
||||||
//Delete backup file from tmp folder
|
//Delete backup file from tmp folder
|
||||||
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
|
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Error deleting file: ", err)
|
utils.Error("Error deleting file: %v", err)
|
||||||
|
|
||||||
}
|
}
|
||||||
if config.prune {
|
if config.prune {
|
||||||
@@ -248,17 +349,29 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
|
|||||||
|
|
||||||
utils.Done("Uploading backup archive to remote storage ... done ")
|
utils.Done("Uploading backup archive to remote storage ... done ")
|
||||||
//Send notification
|
//Send notification
|
||||||
utils.NotifySuccess(finalFileName)
|
utils.NotifySuccess(&utils.NotificationData{
|
||||||
|
File: finalFileName,
|
||||||
|
BackupSize: backupSize,
|
||||||
|
Database: db.dbName,
|
||||||
|
Storage: config.storage,
|
||||||
|
BackupLocation: filepath.Join(config.remotePath, finalFileName),
|
||||||
|
StartTime: startTime,
|
||||||
|
EndTime: time.Now().Format(utils.TimeFormat()),
|
||||||
|
})
|
||||||
//Delete temp
|
//Delete temp
|
||||||
deleteTemp()
|
deleteTemp()
|
||||||
|
utils.Info("Backup completed successfully")
|
||||||
|
|
||||||
}
|
}
|
||||||
func ftpBackup(db *dbConfig, config *BackupConfig) {
|
func ftpBackup(db *dbConfig, config *BackupConfig) {
|
||||||
utils.Info("Backup database to the remote FTP server")
|
utils.Info("Backup database to the remote FTP server")
|
||||||
|
startTime = time.Now().Format(utils.TimeFormat())
|
||||||
|
|
||||||
//Backup database
|
//Backup database
|
||||||
BackupDatabase(db, config.backupFileName, disableCompression)
|
BackupDatabase(db, config.backupFileName, disableCompression)
|
||||||
finalFileName := config.backupFileName
|
finalFileName := config.backupFileName
|
||||||
if config.encryption {
|
if config.encryption {
|
||||||
encryptBackup(config.backupFileName, config.passphrase)
|
encryptBackup(config)
|
||||||
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
|
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
|
||||||
}
|
}
|
||||||
utils.Info("Uploading backup archive to the remote FTP server ... ")
|
utils.Info("Uploading backup archive to the remote FTP server ... ")
|
||||||
@@ -268,7 +381,12 @@ func ftpBackup(db *dbConfig, config *BackupConfig) {
|
|||||||
utils.Fatal("Error uploading file to the remote FTP server: %s ", err)
|
utils.Fatal("Error uploading file to the remote FTP server: %s ", err)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
//Get backup info
|
||||||
|
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
|
||||||
|
if err != nil {
|
||||||
|
utils.Error("Error:", err)
|
||||||
|
}
|
||||||
|
backupSize = fileInfo.Size()
|
||||||
//Delete backup file from tmp folder
|
//Delete backup file from tmp folder
|
||||||
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
|
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -283,16 +401,33 @@ func ftpBackup(db *dbConfig, config *BackupConfig) {
|
|||||||
|
|
||||||
utils.Done("Uploading backup archive to the remote FTP server ... done ")
|
utils.Done("Uploading backup archive to the remote FTP server ... done ")
|
||||||
//Send notification
|
//Send notification
|
||||||
utils.NotifySuccess(finalFileName)
|
utils.NotifySuccess(&utils.NotificationData{
|
||||||
|
File: finalFileName,
|
||||||
|
BackupSize: backupSize,
|
||||||
|
Database: db.dbName,
|
||||||
|
Storage: config.storage,
|
||||||
|
BackupLocation: filepath.Join(config.remotePath, finalFileName),
|
||||||
|
StartTime: startTime,
|
||||||
|
EndTime: time.Now().Format(utils.TimeFormat()),
|
||||||
|
})
|
||||||
//Delete temp
|
//Delete temp
|
||||||
deleteTemp()
|
deleteTemp()
|
||||||
|
utils.Info("Backup completed successfully")
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// encryptBackup encrypt backup
|
func encryptBackup(config *BackupConfig) {
|
||||||
func encryptBackup(backupFileName, passphrase string) {
|
if config.usingKey {
|
||||||
err := Encrypt(filepath.Join(tmpPath, backupFileName), passphrase)
|
err := encryptWithGPGPublicKey(filepath.Join(tmpPath, config.backupFileName), config.publicKey)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error during encrypting backup %s", err)
|
utils.Fatal("error during encrypting backup %v", err)
|
||||||
|
}
|
||||||
|
} else if config.passphrase != "" {
|
||||||
|
err := encryptWithGPG(filepath.Join(tmpPath, config.backupFileName), config.passphrase)
|
||||||
|
if err != nil {
|
||||||
|
utils.Fatal("error during encrypting backup %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
173
pkg/config.go
173
pkg/config.go
@@ -7,12 +7,24 @@
|
|||||||
package pkg
|
package pkg
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"github.com/jkaninda/mysql-bkup/utils"
|
"github.com/jkaninda/mysql-bkup/utils"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"os"
|
"os"
|
||||||
|
"strconv"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type Database struct {
|
||||||
|
Host string `yaml:"host"`
|
||||||
|
Port string `yaml:"port"`
|
||||||
|
Name string `yaml:"name"`
|
||||||
|
User string `yaml:"user"`
|
||||||
|
Password string `yaml:"password"`
|
||||||
|
Path string `yaml:"path"`
|
||||||
|
}
|
||||||
type Config struct {
|
type Config struct {
|
||||||
|
Databases []Database `yaml:"databases"`
|
||||||
|
CronExpression string `yaml:"cronExpression"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type dbConfig struct {
|
type dbConfig struct {
|
||||||
@@ -29,26 +41,23 @@ type targetDbConfig struct {
|
|||||||
targetDbPassword string
|
targetDbPassword string
|
||||||
targetDbName string
|
targetDbName string
|
||||||
}
|
}
|
||||||
|
type TgConfig struct {
|
||||||
|
Token string
|
||||||
|
ChatId string
|
||||||
|
}
|
||||||
type BackupConfig struct {
|
type BackupConfig struct {
|
||||||
backupFileName string
|
backupFileName string
|
||||||
backupRetention int
|
backupRetention int
|
||||||
disableCompression bool
|
disableCompression bool
|
||||||
prune bool
|
prune bool
|
||||||
encryption bool
|
|
||||||
remotePath string
|
remotePath string
|
||||||
|
encryption bool
|
||||||
|
usingKey bool
|
||||||
passphrase string
|
passphrase string
|
||||||
|
publicKey string
|
||||||
storage string
|
storage string
|
||||||
cronExpression string
|
cronExpression string
|
||||||
}
|
}
|
||||||
type RestoreConfig struct {
|
|
||||||
s3Path string
|
|
||||||
remotePath string
|
|
||||||
storage string
|
|
||||||
file string
|
|
||||||
bucket string
|
|
||||||
gpqPassphrase string
|
|
||||||
}
|
|
||||||
type FTPConfig struct {
|
type FTPConfig struct {
|
||||||
host string
|
host string
|
||||||
user string
|
user string
|
||||||
@@ -57,6 +66,24 @@ type FTPConfig struct {
|
|||||||
remotePath string
|
remotePath string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SSHConfig holds the SSH connection details
|
||||||
|
type SSHConfig struct {
|
||||||
|
user string
|
||||||
|
password string
|
||||||
|
hostName string
|
||||||
|
port string
|
||||||
|
identifyFile string
|
||||||
|
}
|
||||||
|
type AWSConfig struct {
|
||||||
|
endpoint string
|
||||||
|
bucket string
|
||||||
|
accessKey string
|
||||||
|
secretKey string
|
||||||
|
region string
|
||||||
|
disableSsl bool
|
||||||
|
forcePathStyle bool
|
||||||
|
}
|
||||||
|
|
||||||
func initDbConfig(cmd *cobra.Command) *dbConfig {
|
func initDbConfig(cmd *cobra.Command) *dbConfig {
|
||||||
//Set env
|
//Set env
|
||||||
utils.GetEnv(cmd, "dbname", "DB_NAME")
|
utils.GetEnv(cmd, "dbname", "DB_NAME")
|
||||||
@@ -74,14 +101,81 @@ func initDbConfig(cmd *cobra.Command) *dbConfig {
|
|||||||
}
|
}
|
||||||
return &dConf
|
return &dConf
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getDatabase(database Database) *dbConfig {
|
||||||
|
return &dbConfig{
|
||||||
|
dbHost: database.Host,
|
||||||
|
dbPort: database.Port,
|
||||||
|
dbName: database.Name,
|
||||||
|
dbUserName: database.User,
|
||||||
|
dbPassword: database.Password,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadSSHConfig loads the SSH configuration from environment variables
|
||||||
|
func loadSSHConfig() (*SSHConfig, error) {
|
||||||
|
utils.GetEnvVariable("SSH_HOST", "SSH_HOST_NAME")
|
||||||
|
sshVars := []string{"SSH_USER", "SSH_HOST", "SSH_PORT", "REMOTE_PATH"}
|
||||||
|
err := utils.CheckEnvVars(sshVars)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error missing environment variables: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &SSHConfig{
|
||||||
|
user: os.Getenv("SSH_USER"),
|
||||||
|
password: os.Getenv("SSH_PASSWORD"),
|
||||||
|
hostName: os.Getenv("SSH_HOST"),
|
||||||
|
port: os.Getenv("SSH_PORT"),
|
||||||
|
identifyFile: os.Getenv("SSH_IDENTIFY_FILE"),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
func initFtpConfig() *FTPConfig {
|
||||||
|
//Initialize data configs
|
||||||
|
fConfig := FTPConfig{}
|
||||||
|
fConfig.host = utils.GetEnvVariable("FTP_HOST", "FTP_HOST_NAME")
|
||||||
|
fConfig.user = os.Getenv("FTP_USER")
|
||||||
|
fConfig.password = os.Getenv("FTP_PASSWORD")
|
||||||
|
fConfig.port = os.Getenv("FTP_PORT")
|
||||||
|
fConfig.remotePath = os.Getenv("REMOTE_PATH")
|
||||||
|
err := utils.CheckEnvVars(ftpVars)
|
||||||
|
if err != nil {
|
||||||
|
utils.Error("Please make sure all required environment variables for FTP are set")
|
||||||
|
utils.Fatal("Error missing environment variables: %s", err)
|
||||||
|
}
|
||||||
|
return &fConfig
|
||||||
|
}
|
||||||
|
func initAWSConfig() *AWSConfig {
|
||||||
|
//Initialize AWS configs
|
||||||
|
aConfig := AWSConfig{}
|
||||||
|
aConfig.endpoint = utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
|
||||||
|
aConfig.accessKey = utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
|
||||||
|
aConfig.secretKey = utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
|
||||||
|
aConfig.bucket = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
|
||||||
|
aConfig.region = os.Getenv("AWS_REGION")
|
||||||
|
disableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
|
||||||
|
if err != nil {
|
||||||
|
utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
|
||||||
|
}
|
||||||
|
forcePathStyle, err := strconv.ParseBool(os.Getenv("AWS_FORCE_PATH_STYLE"))
|
||||||
|
if err != nil {
|
||||||
|
utils.Fatal("Unable to parse AWS_FORCE_PATH_STYLE env var: %s", err)
|
||||||
|
}
|
||||||
|
aConfig.disableSsl = disableSsl
|
||||||
|
aConfig.forcePathStyle = forcePathStyle
|
||||||
|
err = utils.CheckEnvVars(awsVars)
|
||||||
|
if err != nil {
|
||||||
|
utils.Error("Please make sure all required environment variables for AWS S3 are set")
|
||||||
|
utils.Fatal("Error checking environment variables: %s", err)
|
||||||
|
}
|
||||||
|
return &aConfig
|
||||||
|
}
|
||||||
func initBackupConfig(cmd *cobra.Command) *BackupConfig {
|
func initBackupConfig(cmd *cobra.Command) *BackupConfig {
|
||||||
utils.SetEnv("STORAGE_PATH", storagePath)
|
utils.SetEnv("STORAGE_PATH", storagePath)
|
||||||
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
|
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
|
||||||
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
|
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
|
||||||
utils.GetEnv(cmd, "path", "REMOTE_PATH")
|
utils.GetEnv(cmd, "path", "REMOTE_PATH")
|
||||||
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
|
|
||||||
|
|
||||||
//Get flag value and set env
|
//Get flag value and set env
|
||||||
|
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
|
||||||
storage = utils.GetEnv(cmd, "storage", "STORAGE")
|
storage = utils.GetEnv(cmd, "storage", "STORAGE")
|
||||||
backupRetention, _ := cmd.Flags().GetInt("keep-last")
|
backupRetention, _ := cmd.Flags().GetInt("keep-last")
|
||||||
prune, _ := cmd.Flags().GetBool("prune")
|
prune, _ := cmd.Flags().GetBool("prune")
|
||||||
@@ -91,8 +185,13 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
|
|||||||
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
|
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
|
||||||
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
|
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
|
||||||
|
|
||||||
if passphrase != "" {
|
publicKeyFile, err := checkPubKeyFile(os.Getenv("GPG_PUBLIC_KEY"))
|
||||||
|
if err == nil {
|
||||||
encryption = true
|
encryption = true
|
||||||
|
usingKey = true
|
||||||
|
} else if passphrase != "" {
|
||||||
|
encryption = true
|
||||||
|
usingKey = false
|
||||||
}
|
}
|
||||||
//Initialize backup configs
|
//Initialize backup configs
|
||||||
config := BackupConfig{}
|
config := BackupConfig{}
|
||||||
@@ -103,21 +202,41 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
|
|||||||
config.encryption = encryption
|
config.encryption = encryption
|
||||||
config.remotePath = remotePath
|
config.remotePath = remotePath
|
||||||
config.passphrase = passphrase
|
config.passphrase = passphrase
|
||||||
|
config.publicKey = publicKeyFile
|
||||||
|
config.usingKey = usingKey
|
||||||
config.cronExpression = cronExpression
|
config.cronExpression = cronExpression
|
||||||
return &config
|
return &config
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type RestoreConfig struct {
|
||||||
|
s3Path string
|
||||||
|
remotePath string
|
||||||
|
storage string
|
||||||
|
file string
|
||||||
|
bucket string
|
||||||
|
usingKey bool
|
||||||
|
passphrase string
|
||||||
|
privateKey string
|
||||||
|
}
|
||||||
|
|
||||||
func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
|
func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
|
||||||
utils.SetEnv("STORAGE_PATH", storagePath)
|
utils.SetEnv("STORAGE_PATH", storagePath)
|
||||||
utils.GetEnv(cmd, "path", "REMOTE_PATH")
|
utils.GetEnv(cmd, "path", "REMOTE_PATH")
|
||||||
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
|
|
||||||
|
|
||||||
//Get flag value and set env
|
//Get flag value and set env
|
||||||
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
|
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
|
||||||
|
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
|
||||||
storage = utils.GetEnv(cmd, "storage", "STORAGE")
|
storage = utils.GetEnv(cmd, "storage", "STORAGE")
|
||||||
file = utils.GetEnv(cmd, "file", "FILE_NAME")
|
file = utils.GetEnv(cmd, "file", "FILE_NAME")
|
||||||
_, _ = cmd.Flags().GetString("mode")
|
|
||||||
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
|
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
|
||||||
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
|
passphrase := os.Getenv("GPG_PASSPHRASE")
|
||||||
|
privateKeyFile, err := checkPrKeyFile(os.Getenv("GPG_PRIVATE_KEY"))
|
||||||
|
if err == nil {
|
||||||
|
usingKey = true
|
||||||
|
} else if passphrase != "" {
|
||||||
|
usingKey = false
|
||||||
|
}
|
||||||
|
|
||||||
//Initialize restore configs
|
//Initialize restore configs
|
||||||
rConfig := RestoreConfig{}
|
rConfig := RestoreConfig{}
|
||||||
rConfig.s3Path = s3Path
|
rConfig.s3Path = s3Path
|
||||||
@@ -126,7 +245,9 @@ func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
|
|||||||
rConfig.bucket = bucket
|
rConfig.bucket = bucket
|
||||||
rConfig.file = file
|
rConfig.file = file
|
||||||
rConfig.storage = storage
|
rConfig.storage = storage
|
||||||
rConfig.gpqPassphrase = gpqPassphrase
|
rConfig.passphrase = passphrase
|
||||||
|
rConfig.usingKey = usingKey
|
||||||
|
rConfig.privateKey = privateKeyFile
|
||||||
return &rConfig
|
return &rConfig
|
||||||
}
|
}
|
||||||
func initTargetDbConfig() *targetDbConfig {
|
func initTargetDbConfig() *targetDbConfig {
|
||||||
@@ -144,18 +265,10 @@ func initTargetDbConfig() *targetDbConfig {
|
|||||||
}
|
}
|
||||||
return &tdbConfig
|
return &tdbConfig
|
||||||
}
|
}
|
||||||
func initFtpConfig() *FTPConfig {
|
func loadConfigFile() (string, error) {
|
||||||
//Initialize backup configs
|
backupConfigFile, err := checkConfigFile(os.Getenv("BACKUP_CONFIG_FILE"))
|
||||||
fConfig := FTPConfig{}
|
if err == nil {
|
||||||
fConfig.host = os.Getenv("FTP_HOST_NAME")
|
return backupConfigFile, nil
|
||||||
fConfig.user = os.Getenv("FTP_USER")
|
|
||||||
fConfig.password = os.Getenv("FTP_PASSWORD")
|
|
||||||
fConfig.port = os.Getenv("FTP_PORT")
|
|
||||||
fConfig.remotePath = os.Getenv("REMOTE_PATH")
|
|
||||||
err := utils.CheckEnvVars(ftpVars)
|
|
||||||
if err != nil {
|
|
||||||
utils.Error("Please make sure all required environment variables for FTP are set")
|
|
||||||
utils.Fatal("Error checking environment variables: %s", err)
|
|
||||||
}
|
}
|
||||||
return &fConfig
|
return "", fmt.Errorf("backup config file not found")
|
||||||
}
|
}
|
||||||
|
|||||||
169
pkg/encrypt.go
169
pkg/encrypt.go
@@ -7,54 +7,173 @@
|
|||||||
package pkg
|
package pkg
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||||
"github.com/jkaninda/mysql-bkup/utils"
|
"github.com/jkaninda/mysql-bkup/utils"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Decrypt(inputFile string, passphrase string) error {
|
// decryptWithGPG decrypts backup file using a passphrase
|
||||||
utils.Info("Decrypting backup file: " + inputFile + " ...")
|
func decryptWithGPG(inputFile string, passphrase string) error {
|
||||||
//Create gpg home dir
|
utils.Info("Decrypting backup using passphrase...")
|
||||||
err := utils.MakeDirAll(gpgHome)
|
// Read the encrypted file
|
||||||
|
encFileContent, err := os.ReadFile(inputFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.New(fmt.Sprintf("Error reading encrypted file: %s", err))
|
||||||
}
|
}
|
||||||
utils.SetEnv("GNUPGHOME", gpgHome)
|
// Define the passphrase used to encrypt the file
|
||||||
cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--output", RemoveLastExtension(inputFile), "--decrypt", inputFile)
|
_passphrase := []byte(passphrase)
|
||||||
cmd.Stdout = os.Stdout
|
// Create a PGP message object from the encrypted file content
|
||||||
cmd.Stderr = os.Stderr
|
encryptedMessage := crypto.NewPGPMessage(encFileContent)
|
||||||
|
// Decrypt the message using the passphrase
|
||||||
err = cmd.Run()
|
plainMessage, err := crypto.DecryptMessageWithPassword(encryptedMessage, _passphrase)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.New(fmt.Sprintf("Error decrypting file: %s", err))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Save the decrypted file (restore it)
|
||||||
|
err = os.WriteFile(RemoveLastExtension(inputFile), plainMessage.GetBinary(), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error saving decrypted file: %s", err))
|
||||||
|
}
|
||||||
|
utils.Info("Decrypting backup using passphrase...done")
|
||||||
utils.Info("Backup file decrypted successful!")
|
utils.Info("Backup file decrypted successful!")
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func Encrypt(inputFile string, passphrase string) error {
|
// encryptWithGPG encrypts backup using a passphrase
|
||||||
utils.Info("Encrypting backup...")
|
func encryptWithGPG(inputFile string, passphrase string) error {
|
||||||
//Create gpg home dir
|
utils.Info("Encrypting backup using passphrase...")
|
||||||
err := utils.MakeDirAll(gpgHome)
|
// Read the file to be encrypted
|
||||||
|
plainFileContent, err := os.ReadFile(inputFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.New(fmt.Sprintf("Error reading file: %s", err))
|
||||||
}
|
}
|
||||||
utils.SetEnv("GNUPGHOME", gpgHome)
|
// Define the passphrase to encrypt the file
|
||||||
cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--symmetric", "--cipher-algo", algorithm, inputFile)
|
_passphrase := []byte(passphrase)
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
|
|
||||||
err = cmd.Run()
|
// Create a message object from the file content
|
||||||
|
message := crypto.NewPlainMessage(plainFileContent)
|
||||||
|
// Encrypt the message using the passphrase
|
||||||
|
encryptedMessage, err := crypto.EncryptMessageWithPassword(message, _passphrase)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.New(fmt.Sprintf("Error encrypting backup file: %s", err))
|
||||||
}
|
}
|
||||||
|
// Save the encrypted .tar file
|
||||||
|
err = os.WriteFile(fmt.Sprintf("%s.%s", inputFile, gpgExtension), encryptedMessage.GetBinary(), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error saving encrypted filee: %s", err))
|
||||||
|
}
|
||||||
|
utils.Info("Encrypting backup using passphrase...done")
|
||||||
utils.Info("Backup file encrypted successful!")
|
utils.Info("Backup file encrypted successful!")
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// encryptWithGPGPublicKey encrypts backup using a public key
|
||||||
|
func encryptWithGPGPublicKey(inputFile string, publicKey string) error {
|
||||||
|
utils.Info("Encrypting backup using public key...")
|
||||||
|
// Read the public key
|
||||||
|
pubKeyBytes, err := os.ReadFile(publicKey)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error reading public key: %s", err))
|
||||||
|
}
|
||||||
|
// Create a new keyring with the public key
|
||||||
|
publicKeyObj, err := crypto.NewKeyFromArmored(string(pubKeyBytes))
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error parsing public key: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
keyRing, err := crypto.NewKeyRing(publicKeyObj)
|
||||||
|
if err != nil {
|
||||||
|
|
||||||
|
return errors.New(fmt.Sprintf("Error creating key ring: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the file to encryptWithGPGPublicKey
|
||||||
|
fileContent, err := os.ReadFile(inputFile)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error reading file: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// encryptWithGPG the file
|
||||||
|
message := crypto.NewPlainMessage(fileContent)
|
||||||
|
encMessage, err := keyRing.Encrypt(message, nil)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error encrypting file: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the encrypted file
|
||||||
|
err = os.WriteFile(fmt.Sprintf("%s.%s", inputFile, gpgExtension), encMessage.GetBinary(), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error saving encrypted file: %v", err))
|
||||||
|
}
|
||||||
|
utils.Info("Encrypting backup using public key...done")
|
||||||
|
utils.Info("Backup file encrypted successful!")
|
||||||
|
return nil
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// decryptWithGPGPrivateKey decrypts backup file using a private key and passphrase.
|
||||||
|
// privateKey GPG private key
|
||||||
|
// passphrase GPG passphrase
|
||||||
|
func decryptWithGPGPrivateKey(inputFile, privateKey, passphrase string) error {
|
||||||
|
utils.Info("Encrypting backup using private key...")
|
||||||
|
|
||||||
|
// Read the private key
|
||||||
|
priKeyBytes, err := os.ReadFile(privateKey)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error reading private key: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the password for the private key (if it’s password-protected)
|
||||||
|
password := []byte(passphrase)
|
||||||
|
|
||||||
|
// Create a key object from the armored private key
|
||||||
|
privateKeyObj, err := crypto.NewKeyFromArmored(string(priKeyBytes))
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error parsing private key: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unlock the private key with the password
|
||||||
|
if passphrase != "" {
|
||||||
|
// Unlock the private key with the password
|
||||||
|
_, err = privateKeyObj.Unlock(password)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error unlocking private key: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a new keyring with the private key
|
||||||
|
keyRing, err := crypto.NewKeyRing(privateKeyObj)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error creating key ring: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the encrypted file
|
||||||
|
encFileContent, err := os.ReadFile(inputFile)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error reading encrypted file: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// decryptWithGPG the file
|
||||||
|
encryptedMessage := crypto.NewPGPMessage(encFileContent)
|
||||||
|
message, err := keyRing.Decrypt(encryptedMessage, nil, 0)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error decrypting file: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the decrypted file
|
||||||
|
err = os.WriteFile(RemoveLastExtension(inputFile), message.GetBinary(), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New(fmt.Sprintf("Error saving decrypted file: %s", err))
|
||||||
|
}
|
||||||
|
utils.Info("Encrypting backup using public key...done")
|
||||||
|
fmt.Println("File successfully decrypted!")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
func RemoveLastExtension(filename string) string {
|
func RemoveLastExtension(filename string) string {
|
||||||
if idx := strings.LastIndex(filename, "."); idx != -1 {
|
if idx := strings.LastIndex(filename, "."); idx != -1 {
|
||||||
return filename[:idx]
|
return filename[:idx]
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/jkaninda/mysql-bkup/utils"
|
"github.com/jkaninda/mysql-bkup/utils"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
@@ -129,3 +130,84 @@ func intro() {
|
|||||||
utils.Info("Starting MySQL Backup...")
|
utils.Info("Starting MySQL Backup...")
|
||||||
utils.Info("Copyright (c) 2024 Jonas Kaninda ")
|
utils.Info("Copyright (c) 2024 Jonas Kaninda ")
|
||||||
}
|
}
|
||||||
|
func checkPubKeyFile(pubKey string) (string, error) {
|
||||||
|
// Define possible key file names
|
||||||
|
keyFiles := []string{filepath.Join(gpgHome, "public_key.asc"), filepath.Join(gpgHome, "public_key.gpg"), pubKey}
|
||||||
|
|
||||||
|
// Loop through key file names and check if they exist
|
||||||
|
for _, keyFile := range keyFiles {
|
||||||
|
if _, err := os.Stat(keyFile); err == nil {
|
||||||
|
// File exists
|
||||||
|
return keyFile, nil
|
||||||
|
} else if os.IsNotExist(err) {
|
||||||
|
// File does not exist, continue to the next one
|
||||||
|
continue
|
||||||
|
} else {
|
||||||
|
// An unexpected error occurred
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return an error if neither file exists
|
||||||
|
return "", fmt.Errorf("no public key file found")
|
||||||
|
}
|
||||||
|
func checkPrKeyFile(prKey string) (string, error) {
|
||||||
|
// Define possible key file names
|
||||||
|
keyFiles := []string{filepath.Join(gpgHome, "private_key.asc"), filepath.Join(gpgHome, "private_key.gpg"), prKey}
|
||||||
|
|
||||||
|
// Loop through key file names and check if they exist
|
||||||
|
for _, keyFile := range keyFiles {
|
||||||
|
if _, err := os.Stat(keyFile); err == nil {
|
||||||
|
// File exists
|
||||||
|
return keyFile, nil
|
||||||
|
} else if os.IsNotExist(err) {
|
||||||
|
// File does not exist, continue to the next one
|
||||||
|
continue
|
||||||
|
} else {
|
||||||
|
// An unexpected error occurred
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return an error if neither file exists
|
||||||
|
return "", fmt.Errorf("no public key file found")
|
||||||
|
}
|
||||||
|
func readConf(configFile string) (*Config, error) {
|
||||||
|
//configFile := filepath.Join("./", filename)
|
||||||
|
if utils.FileExists(configFile) {
|
||||||
|
buf, err := os.ReadFile(configFile)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
c := &Config{}
|
||||||
|
err = yaml.Unmarshal(buf, c)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("in file %q: %w", configFile, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c, err
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("config file %q not found", configFile)
|
||||||
|
}
|
||||||
|
func checkConfigFile(filePath string) (string, error) {
|
||||||
|
// Define possible config file names
|
||||||
|
configFiles := []string{filepath.Join(workingDir, "config.yaml"), filepath.Join(workingDir, "config.yml"), filePath}
|
||||||
|
|
||||||
|
// Loop through config file names and check if they exist
|
||||||
|
for _, configFile := range configFiles {
|
||||||
|
if _, err := os.Stat(configFile); err == nil {
|
||||||
|
// File exists
|
||||||
|
return configFile, nil
|
||||||
|
} else if os.IsNotExist(err) {
|
||||||
|
// File does not exist, continue to the next one
|
||||||
|
continue
|
||||||
|
} else {
|
||||||
|
// An unexpected error occurred
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return an error if neither file exists
|
||||||
|
return "", fmt.Errorf("no config file found")
|
||||||
|
}
|
||||||
|
|||||||
@@ -30,11 +30,13 @@ func StartMigration(cmd *cobra.Command) {
|
|||||||
|
|
||||||
//Generate file name
|
//Generate file name
|
||||||
backupFileName := fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
|
backupFileName := fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
|
||||||
|
conf := &RestoreConfig{}
|
||||||
|
conf.file = backupFileName
|
||||||
//Backup source Database
|
//Backup source Database
|
||||||
BackupDatabase(dbConf, backupFileName, true)
|
BackupDatabase(dbConf, backupFileName, true)
|
||||||
//Restore source database into target database
|
//Restore source database into target database
|
||||||
utils.Info("Restoring [%s] database into [%s] database...", dbConf.dbName, targetDbConf.targetDbName)
|
utils.Info("Restoring [%s] database into [%s] database...", dbConf.dbName, targetDbConf.targetDbName)
|
||||||
RestoreDatabase(&newDbConfig, backupFileName)
|
RestoreDatabase(&newDbConfig, conf)
|
||||||
utils.Info("[%s] database has been restored into [%s] database", dbConf.dbName, targetDbConf.targetDbName)
|
utils.Info("[%s] database has been restored into [%s] database", dbConf.dbName, targetDbConf.targetDbName)
|
||||||
utils.Info("Database migration completed.")
|
utils.Info("Database migration completed.")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,6 @@
|
|||||||
package pkg
|
package pkg
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"github.com/jkaninda/mysql-bkup/utils"
|
"github.com/jkaninda/mysql-bkup/utils"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"os"
|
"os"
|
||||||
@@ -21,90 +20,91 @@ func StartRestore(cmd *cobra.Command) {
|
|||||||
restoreConf := initRestoreConfig(cmd)
|
restoreConf := initRestoreConfig(cmd)
|
||||||
|
|
||||||
switch restoreConf.storage {
|
switch restoreConf.storage {
|
||||||
case "s3":
|
|
||||||
restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path)
|
|
||||||
case "local":
|
case "local":
|
||||||
utils.Info("Restore database from local")
|
utils.Info("Restore database from local")
|
||||||
copyToTmp(storagePath, restoreConf.file)
|
copyToTmp(storagePath, restoreConf.file)
|
||||||
RestoreDatabase(dbConf, restoreConf.file)
|
RestoreDatabase(dbConf, restoreConf)
|
||||||
case "ssh":
|
case "s3", "S3":
|
||||||
restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath)
|
restoreFromS3(dbConf, restoreConf)
|
||||||
case "ftp":
|
case "ssh", "SSH", "remote":
|
||||||
restoreFromFTP(dbConf, restoreConf.file, restoreConf.remotePath)
|
restoreFromRemote(dbConf, restoreConf)
|
||||||
|
case "ftp", "FTP":
|
||||||
|
restoreFromFTP(dbConf, restoreConf)
|
||||||
default:
|
default:
|
||||||
utils.Info("Restore database from local")
|
utils.Info("Restore database from local")
|
||||||
copyToTmp(storagePath, restoreConf.file)
|
copyToTmp(storagePath, restoreConf.file)
|
||||||
RestoreDatabase(dbConf, restoreConf.file)
|
RestoreDatabase(dbConf, restoreConf)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
|
func restoreFromS3(db *dbConfig, conf *RestoreConfig) {
|
||||||
utils.Info("Restore database from s3")
|
utils.Info("Restore database from s3")
|
||||||
err := DownloadFile(tmpPath, file, bucket, s3Path)
|
err := DownloadFile(tmpPath, conf.file, conf.bucket, conf.s3Path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error download file from s3 %s %v", file, err)
|
utils.Fatal("Error download file from s3 %s %v ", conf.file, err)
|
||||||
}
|
}
|
||||||
RestoreDatabase(db, file)
|
RestoreDatabase(db, conf)
|
||||||
}
|
}
|
||||||
func restoreFromRemote(db *dbConfig, file, remotePath string) {
|
func restoreFromRemote(db *dbConfig, conf *RestoreConfig) {
|
||||||
utils.Info("Restore database from remote server")
|
utils.Info("Restore database from remote server")
|
||||||
err := CopyFromRemote(file, remotePath)
|
err := CopyFromRemote(conf.file, conf.remotePath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error download file from remote server: %s %v ", filepath.Join(remotePath, file), err)
|
utils.Fatal("Error download file from remote server: %s %v", filepath.Join(conf.remotePath, conf.file), err)
|
||||||
}
|
}
|
||||||
RestoreDatabase(db, file)
|
RestoreDatabase(db, conf)
|
||||||
}
|
}
|
||||||
func restoreFromFTP(db *dbConfig, file, remotePath string) {
|
func restoreFromFTP(db *dbConfig, conf *RestoreConfig) {
|
||||||
utils.Info("Restore database from FTP server")
|
utils.Info("Restore database from FTP server")
|
||||||
err := CopyFromFTP(file, remotePath)
|
err := CopyFromFTP(conf.file, conf.remotePath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error download file from FTP server: %s %v", filepath.Join(remotePath, file), err)
|
utils.Fatal("Error download file from FTP server: %s %v", filepath.Join(conf.remotePath, conf.file), err)
|
||||||
}
|
}
|
||||||
RestoreDatabase(db, file)
|
RestoreDatabase(db, conf)
|
||||||
}
|
}
|
||||||
|
|
||||||
// RestoreDatabase restore database
|
// RestoreDatabase restore database
|
||||||
func RestoreDatabase(db *dbConfig, file string) {
|
func RestoreDatabase(db *dbConfig, conf *RestoreConfig) {
|
||||||
gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
|
if conf.file == "" {
|
||||||
if file == "" {
|
|
||||||
utils.Fatal("Error, file required")
|
utils.Fatal("Error, file required")
|
||||||
}
|
}
|
||||||
|
extension := filepath.Ext(filepath.Join(tmpPath, conf.file))
|
||||||
err := utils.CheckEnvVars(dbHVars)
|
|
||||||
if err != nil {
|
|
||||||
utils.Error("Please make sure all required environment variables for database are set")
|
|
||||||
utils.Fatal("Error checking environment variables: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
|
|
||||||
if extension == ".gpg" {
|
if extension == ".gpg" {
|
||||||
if gpgPassphrase == "" {
|
|
||||||
utils.Fatal("Error: GPG passphrase is required, your file seems to be a GPG file.\nYou need to provide GPG keys. GPG_PASSPHRASE environment variable is required.")
|
|
||||||
|
|
||||||
} else {
|
if conf.usingKey {
|
||||||
//Decrypt file
|
utils.Warn("Backup decryption using a private key is not fully supported")
|
||||||
err := Decrypt(filepath.Join(tmpPath, file), gpgPassphrase)
|
err := decryptWithGPGPrivateKey(filepath.Join(tmpPath, conf.file), conf.privateKey, conf.passphrase)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error decrypting file %s %v", file, err)
|
utils.Fatal("error during decrypting backup %v", err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if conf.passphrase == "" {
|
||||||
|
utils.Error("Error, passphrase or private key required")
|
||||||
|
utils.Fatal("Your file seems to be a GPG file.\nYou need to provide GPG keys. GPG_PASSPHRASE or GPG_PRIVATE_KEY environment variable is required.")
|
||||||
|
} else {
|
||||||
|
//decryptWithGPG file
|
||||||
|
err := decryptWithGPG(filepath.Join(tmpPath, conf.file), conf.passphrase)
|
||||||
|
if err != nil {
|
||||||
|
utils.Fatal("Error decrypting file %s %v", file, err)
|
||||||
|
}
|
||||||
|
//Update file name
|
||||||
|
conf.file = RemoveLastExtension(file)
|
||||||
}
|
}
|
||||||
//Update file name
|
|
||||||
file = RemoveLastExtension(file)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) {
|
if utils.FileExists(filepath.Join(tmpPath, conf.file)) {
|
||||||
err = os.Setenv("MYSQL_PWD", db.dbPassword)
|
err := os.Setenv("MYSQL_PWD", db.dbPassword)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
testDatabaseConnection(db)
|
testDatabaseConnection(db)
|
||||||
utils.Info("Restoring database...")
|
utils.Info("Restoring database...")
|
||||||
|
|
||||||
extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
|
extension := filepath.Ext(filepath.Join(tmpPath, conf.file))
|
||||||
// Restore from compressed file / .sql.gz
|
// Restore from compressed file / .sql.gz
|
||||||
if extension == ".gz" {
|
if extension == ".gz" {
|
||||||
str := "zcat " + filepath.Join(tmpPath, file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
|
str := "zcat " + filepath.Join(tmpPath, conf.file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
|
||||||
_, err := exec.Command("sh", "-c", str).Output()
|
_, err := exec.Command("sh", "-c", str).Output()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error, in restoring the database %v", err)
|
utils.Fatal("Error, in restoring the database %v", err)
|
||||||
@@ -116,7 +116,7 @@ func RestoreDatabase(db *dbConfig, file string) {
|
|||||||
|
|
||||||
} else if extension == ".sql" {
|
} else if extension == ".sql" {
|
||||||
//Restore from sql file
|
//Restore from sql file
|
||||||
str := "cat " + filepath.Join(tmpPath, file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
|
str := "cat " + filepath.Join(tmpPath, conf.file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
|
||||||
_, err := exec.Command("sh", "-c", str).Output()
|
_, err := exec.Command("sh", "-c", str).Output()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatal("Error in restoring the database %v", err)
|
utils.Fatal("Error in restoring the database %v", err)
|
||||||
@@ -130,6 +130,6 @@ func RestoreDatabase(db *dbConfig, file string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
utils.Fatal("File not found in %s", filepath.Join(tmpPath, file))
|
utils.Fatal("File not found in %s", filepath.Join(tmpPath, conf.file))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
56
pkg/s3.go
56
pkg/s3.go
@@ -1,4 +1,4 @@
|
|||||||
// Package utils /
|
// Package pkg
|
||||||
/*****
|
/*****
|
||||||
@author Jonas Kaninda
|
@author Jonas Kaninda
|
||||||
@license MIT License <https://opensource.org/licenses/MIT>
|
@license MIT License <https://opensource.org/licenses/MIT>
|
||||||
@@ -8,56 +8,28 @@ package pkg
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
|
||||||
"github.com/aws/aws-sdk-go/aws"
|
"github.com/aws/aws-sdk-go/aws"
|
||||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||||
"github.com/aws/aws-sdk-go/aws/session"
|
"github.com/aws/aws-sdk-go/aws/session"
|
||||||
"github.com/aws/aws-sdk-go/service/s3"
|
"github.com/aws/aws-sdk-go/service/s3"
|
||||||
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
||||||
"github.com/jkaninda/mysql-bkup/utils"
|
"github.com/jkaninda/mysql-bkup/utils"
|
||||||
"log"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strconv"
|
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
// CreateSession creates a new AWS session
|
// CreateSession creates a new AWS session
|
||||||
func CreateSession() (*session.Session, error) {
|
func CreateSession() (*session.Session, error) {
|
||||||
// AwsVars Required environment variables for AWS S3 storage
|
awsConfig := initAWSConfig()
|
||||||
var awsVars = []string{
|
// Configure to use MinIO Server
|
||||||
"AWS_S3_ENDPOINT",
|
|
||||||
"AWS_S3_BUCKET_NAME",
|
|
||||||
"AWS_ACCESS_KEY",
|
|
||||||
"AWS_SECRET_KEY",
|
|
||||||
"AWS_REGION",
|
|
||||||
"AWS_REGION",
|
|
||||||
"AWS_REGION",
|
|
||||||
}
|
|
||||||
|
|
||||||
endPoint := utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
|
|
||||||
accessKey := utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
|
|
||||||
secretKey := utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
|
|
||||||
_ = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
|
|
||||||
|
|
||||||
region := os.Getenv("AWS_REGION")
|
|
||||||
awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
|
|
||||||
if err != nil {
|
|
||||||
utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = utils.CheckEnvVars(awsVars)
|
|
||||||
if err != nil {
|
|
||||||
utils.Fatal("Error checking environment variables\n: %s", err)
|
|
||||||
}
|
|
||||||
// S3 Config
|
|
||||||
s3Config := &aws.Config{
|
s3Config := &aws.Config{
|
||||||
Credentials: credentials.NewStaticCredentials(accessKey, secretKey, ""),
|
Credentials: credentials.NewStaticCredentials(awsConfig.accessKey, awsConfig.secretKey, ""),
|
||||||
Endpoint: aws.String(endPoint),
|
Endpoint: aws.String(awsConfig.endpoint),
|
||||||
Region: aws.String(region),
|
Region: aws.String(awsConfig.region),
|
||||||
DisableSSL: aws.Bool(awsDisableSsl),
|
DisableSSL: aws.Bool(awsConfig.disableSsl),
|
||||||
S3ForcePathStyle: aws.Bool(true),
|
S3ForcePathStyle: aws.Bool(awsConfig.forcePathStyle),
|
||||||
}
|
}
|
||||||
return session.NewSession(s3Config)
|
return session.NewSession(s3Config)
|
||||||
|
|
||||||
@@ -109,10 +81,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
utils.Info("Download backup from S3 storage...")
|
utils.Info("Download data from S3 storage...")
|
||||||
file, err := os.Create(filepath.Join(destinationPath, key))
|
file, err := os.Create(filepath.Join(destinationPath, key))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Failed to create file", err)
|
utils.Error("Failed to create file", err)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
@@ -159,18 +131,18 @@ func DeleteOldBackup(bucket, prefix string, retention int) error {
|
|||||||
Key: object.Key,
|
Key: object.Key,
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("Failed to delete object %s: %v", *object.Key, err)
|
utils.Info("Failed to delete object %s: %v", *object.Key, err)
|
||||||
} else {
|
} else {
|
||||||
fmt.Printf("Deleted object %s\n", *object.Key)
|
utils.Info("Deleted object %s\n", *object.Key)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return !lastPage
|
return !lastPage
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("Failed to list objects: %v", err)
|
utils.Error("Failed to list objects: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println("Finished deleting old files.")
|
utils.Info("Finished deleting old files.")
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
100
pkg/scp.go
100
pkg/scp.go
@@ -18,83 +18,73 @@ import (
|
|||||||
"path/filepath"
|
"path/filepath"
|
||||||
)
|
)
|
||||||
|
|
||||||
func CopyToRemote(fileName, remotePath string) error {
|
// createSSHClientConfig sets up the SSH client configuration based on the provided SSHConfig
|
||||||
sshUser := os.Getenv("SSH_USER")
|
func createSSHClientConfig(sshConfig *SSHConfig) (ssh.ClientConfig, error) {
|
||||||
sshPassword := os.Getenv("SSH_PASSWORD")
|
if sshConfig.identifyFile != "" && utils.FileExists(sshConfig.identifyFile) {
|
||||||
sshHostName := os.Getenv("SSH_HOST_NAME")
|
return auth.PrivateKey(sshConfig.user, sshConfig.identifyFile, ssh.InsecureIgnoreHostKey())
|
||||||
sshPort := os.Getenv("SSH_PORT")
|
|
||||||
sshIdentifyFile := os.Getenv("SSH_IDENTIFY_FILE")
|
|
||||||
|
|
||||||
err := utils.CheckEnvVars(sshHVars)
|
|
||||||
if err != nil {
|
|
||||||
utils.Error("Error checking environment variables: %s", err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
clientConfig, _ := auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
|
|
||||||
if sshIdentifyFile != "" && utils.FileExists(sshIdentifyFile) {
|
|
||||||
clientConfig, _ = auth.PrivateKey(sshUser, sshIdentifyFile, ssh.InsecureIgnoreHostKey())
|
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
if sshPassword == "" {
|
if sshConfig.password == "" {
|
||||||
return errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty")
|
return ssh.ClientConfig{}, errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty")
|
||||||
}
|
}
|
||||||
utils.Warn("Accessing the remote server using password, password is not recommended")
|
utils.Warn("Accessing the remote server using password, which is not recommended.")
|
||||||
clientConfig, _ = auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
|
return auth.PasswordKey(sshConfig.user, sshConfig.password, ssh.InsecureIgnoreHostKey())
|
||||||
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CopyToRemote copies a file to a remote server via SCP
|
||||||
|
func CopyToRemote(fileName, remotePath string) error {
|
||||||
|
// Load environment variables
|
||||||
|
sshConfig, err := loadSSHConfig()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to load SSH configuration: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize SSH client config
|
||||||
|
clientConfig, err := createSSHClientConfig(sshConfig)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create SSH client config: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
// Create a new SCP client
|
// Create a new SCP client
|
||||||
client := scp.NewClient(fmt.Sprintf("%s:%s", sshHostName, sshPort), &clientConfig)
|
client := scp.NewClient(fmt.Sprintf("%s:%s", sshConfig.hostName, sshConfig.port), &clientConfig)
|
||||||
|
|
||||||
// Connect to the remote server
|
// Connect to the remote server
|
||||||
err = client.Connect()
|
err = client.Connect()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.New("Couldn't establish a connection to the remote server")
|
return errors.New("Couldn't establish a connection to the remote server\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Open a file
|
// Open the local file
|
||||||
file, _ := os.Open(filepath.Join(tmpPath, fileName))
|
filePath := filepath.Join(tmpPath, fileName)
|
||||||
|
file, err := os.Open(filePath)
|
||||||
// Close client connection after the file has been copied
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to open file %s: %w", filePath, err)
|
||||||
|
}
|
||||||
defer client.Close()
|
defer client.Close()
|
||||||
// Close the file after it has been copied
|
// Copy file to the remote server
|
||||||
defer file.Close()
|
|
||||||
// the context can be adjusted to provide time-outs or inherit from other contexts if this is embedded in a larger application.
|
|
||||||
err = client.CopyFromFile(context.Background(), *file, filepath.Join(remotePath, fileName), "0655")
|
err = client.CopyFromFile(context.Background(), *file, filepath.Join(remotePath, fileName), "0655")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Error while copying file ")
|
return fmt.Errorf("failed to copy file to remote server: %w", err)
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func CopyFromRemote(fileName, remotePath string) error {
|
func CopyFromRemote(fileName, remotePath string) error {
|
||||||
sshUser := os.Getenv("SSH_USER")
|
// Load environment variables
|
||||||
sshPassword := os.Getenv("SSH_PASSWORD")
|
sshConfig, err := loadSSHConfig()
|
||||||
sshHostName := os.Getenv("SSH_HOST_NAME")
|
|
||||||
sshPort := os.Getenv("SSH_PORT")
|
|
||||||
sshIdentifyFile := os.Getenv("SSH_IDENTIFY_FILE")
|
|
||||||
|
|
||||||
err := utils.CheckEnvVars(sshHVars)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Error("Error checking environment variables\n: %s", err)
|
return fmt.Errorf("failed to load SSH configuration: %w", err)
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
clientConfig, _ := auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
|
// Initialize SSH client config
|
||||||
if sshIdentifyFile != "" && utils.FileExists(sshIdentifyFile) {
|
clientConfig, err := createSSHClientConfig(sshConfig)
|
||||||
clientConfig, _ = auth.PrivateKey(sshUser, sshIdentifyFile, ssh.InsecureIgnoreHostKey())
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create SSH client config: %w", err)
|
||||||
} else {
|
|
||||||
if sshPassword == "" {
|
|
||||||
return errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty\n")
|
|
||||||
}
|
|
||||||
utils.Warn("Accessing the remote server using password, password is not recommended")
|
|
||||||
clientConfig, _ = auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create a new SCP client
|
// Create a new SCP client
|
||||||
client := scp.NewClient(fmt.Sprintf("%s:%s", sshHostName, sshPort), &clientConfig)
|
client := scp.NewClient(fmt.Sprintf("%s:%s", sshConfig.hostName, sshConfig.port), &clientConfig)
|
||||||
|
|
||||||
// Connect to the remote server
|
// Connect to the remote server
|
||||||
err = client.Connect()
|
err = client.Connect()
|
||||||
@@ -113,7 +103,7 @@ func CopyFromRemote(fileName, remotePath string) error {
|
|||||||
err = client.CopyFromRemote(context.Background(), file, filepath.Join(remotePath, fileName))
|
err = client.CopyFromRemote(context.Background(), file, filepath.Join(remotePath, fileName))
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Error while copying file ", err)
|
utils.Error("Error while copying file %s ", err)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
23
pkg/var.go
23
pkg/var.go
@@ -11,13 +11,17 @@ const tmpPath = "/tmp/backup"
|
|||||||
const algorithm = "aes256"
|
const algorithm = "aes256"
|
||||||
const gpgHome = "/config/gnupg"
|
const gpgHome = "/config/gnupg"
|
||||||
const gpgExtension = "gpg"
|
const gpgExtension = "gpg"
|
||||||
|
const workingDir = "/config"
|
||||||
|
|
||||||
var (
|
var (
|
||||||
storage = "local"
|
storage = "local"
|
||||||
file = ""
|
file = ""
|
||||||
storagePath = "/backup"
|
storagePath = "/backup"
|
||||||
disableCompression = false
|
disableCompression = false
|
||||||
encryption = false
|
encryption = false
|
||||||
|
usingKey = false
|
||||||
|
backupSize int64 = 0
|
||||||
|
startTime string
|
||||||
)
|
)
|
||||||
|
|
||||||
// dbHVars Required environment variables for database
|
// dbHVars Required environment variables for database
|
||||||
@@ -51,3 +55,12 @@ var ftpVars = []string{
|
|||||||
"FTP_PASSWORD",
|
"FTP_PASSWORD",
|
||||||
"FTP_PORT",
|
"FTP_PORT",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AwsVars Required environment variables for AWS S3 storage
|
||||||
|
var awsVars = []string{
|
||||||
|
"AWS_S3_ENDPOINT",
|
||||||
|
"AWS_S3_BUCKET_NAME",
|
||||||
|
"AWS_ACCESS_KEY",
|
||||||
|
"AWS_SECRET_KEY",
|
||||||
|
"AWS_REGION",
|
||||||
|
}
|
||||||
|
|||||||
18
templates/email-error.template
Normal file
18
templates/email-error.template
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>🔴 Urgent: Database Backup Failure Notification</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h2>Hi,</h2>
|
||||||
|
<p>An error occurred during database backup.</p>
|
||||||
|
<h3>Failure Details:</h3>
|
||||||
|
<ul>
|
||||||
|
<li>Error Message: {{.Error}}</li>
|
||||||
|
<li>Date: {{.EndTime}}</li>
|
||||||
|
<li>Backup Reference: {{.BackupReference}} </li>
|
||||||
|
</ul>
|
||||||
|
<p>©2024 <a href="github.com/jkaninda/mysql-bkup">mysql-bkup</a></p>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
24
templates/email.template
Normal file
24
templates/email.template
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>✅ Database Backup Notification – {{.Database}}</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h2>Hi,</h2>
|
||||||
|
<p>Backup of the {{.Database}} database has been successfully completed on {{.EndTime}}.</p>
|
||||||
|
<h3>Backup Details:</h3>
|
||||||
|
<ul>
|
||||||
|
<li>Database Name: {{.Database}}</li>
|
||||||
|
<li>Backup Start Time: {{.StartTime}}</li>
|
||||||
|
<li>Backup End Time: {{.EndTime}}</li>
|
||||||
|
<li>Backup Storage: {{.Storage}}</li>
|
||||||
|
<li>Backup Location: {{.BackupLocation}}</li>
|
||||||
|
<li>Backup Size: {{.BackupSize}} bytes</li>
|
||||||
|
<li>Backup Reference: {{.BackupReference}} </li>
|
||||||
|
</ul>
|
||||||
|
<p>Best regards,</p>
|
||||||
|
<p>©2024 <a href="github.com/jkaninda/mysql-bkup">mysql-bkup</a></p>
|
||||||
|
<href>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
8
templates/telegram-error.template
Normal file
8
templates/telegram-error.template
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
🔴 Urgent: Database Backup Failure Notification
|
||||||
|
Hi,
|
||||||
|
An error occurred during database backup.
|
||||||
|
Failure Details:
|
||||||
|
- Date: {{.EndTime}}
|
||||||
|
- Backup Reference: {{.BackupReference}}
|
||||||
|
- Error Message: {{.Error}}
|
||||||
|
|
||||||
12
templates/telegram.template
Normal file
12
templates/telegram.template
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[✅ Database Backup Notification – {{.Database}}
|
||||||
|
Hi,
|
||||||
|
Backup of the {{.Database}} database has been successfully completed on {{.EndTime}}.
|
||||||
|
|
||||||
|
Backup Details:
|
||||||
|
- Database Name: {{.Database}}
|
||||||
|
- Backup Start Time: {{.StartTime}}
|
||||||
|
- Backup EndTime: {{.EndTime}}
|
||||||
|
- Backup Storage: {{.Storage}}
|
||||||
|
- Backup Location: {{.BackupLocation}}
|
||||||
|
- Backup Size: {{.BackupSize}} bytes
|
||||||
|
- Backup Reference: {{.BackupReference}}
|
||||||
59
utils/config.go
Normal file
59
utils/config.go
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
package utils
|
||||||
|
|
||||||
|
import "os"
|
||||||
|
|
||||||
|
type MailConfig struct {
|
||||||
|
MailHost string
|
||||||
|
MailPort int
|
||||||
|
MailUserName string
|
||||||
|
MailPassword string
|
||||||
|
MailTo string
|
||||||
|
MailFrom string
|
||||||
|
SkipTls bool
|
||||||
|
}
|
||||||
|
type NotificationData struct {
|
||||||
|
File string
|
||||||
|
BackupSize int64
|
||||||
|
Database string
|
||||||
|
StartTime string
|
||||||
|
EndTime string
|
||||||
|
Storage string
|
||||||
|
BackupLocation string
|
||||||
|
BackupReference string
|
||||||
|
}
|
||||||
|
type ErrorMessage struct {
|
||||||
|
Database string
|
||||||
|
EndTime string
|
||||||
|
Error string
|
||||||
|
BackupReference string
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadMailConfig gets mail environment variables and returns MailConfig
|
||||||
|
func loadMailConfig() *MailConfig {
|
||||||
|
return &MailConfig{
|
||||||
|
MailHost: os.Getenv("MAIL_HOST"),
|
||||||
|
MailPort: GetIntEnv("MAIL_PORT"),
|
||||||
|
MailUserName: os.Getenv("MAIL_USERNAME"),
|
||||||
|
MailPassword: os.Getenv("MAIL_PASSWORD"),
|
||||||
|
MailTo: os.Getenv("MAIL_TO"),
|
||||||
|
MailFrom: os.Getenv("MAIL_FROM"),
|
||||||
|
SkipTls: os.Getenv("MAIL_SKIP_TLS") == "false",
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// TimeFormat returns the format of the time
|
||||||
|
func TimeFormat() string {
|
||||||
|
format := os.Getenv("TIME_FORMAT")
|
||||||
|
if format == "" {
|
||||||
|
return "2006-01-02 at 15:04:05"
|
||||||
|
|
||||||
|
}
|
||||||
|
return format
|
||||||
|
}
|
||||||
|
|
||||||
|
func backupReference() string {
|
||||||
|
return os.Getenv("BACKUP_REFERENCE")
|
||||||
|
}
|
||||||
|
|
||||||
|
const templatePath = "/config/templates"
|
||||||
@@ -6,9 +6,9 @@
|
|||||||
**/
|
**/
|
||||||
package utils
|
package utils
|
||||||
|
|
||||||
const RestoreExample = "mysql-bkup restore --dbname database --file db_20231219_022941.sql.gz\n" +
|
const RestoreExample = "restore --dbname database --file db_20231219_022941.sql.gz\n" +
|
||||||
"restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz"
|
"restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz"
|
||||||
const BackupExample = "mysql-bkup backup --dbname database --disable-compression\n" +
|
const BackupExample = "backup --dbname database --disable-compression\n" +
|
||||||
"backup --dbname database --storage s3 --path /custom-path --disable-compression"
|
"backup --dbname database --storage s3 --path /custom-path --disable-compression"
|
||||||
|
|
||||||
const MainExample = "mysql-bkup backup --dbname database --disable-compression\n" +
|
const MainExample = "mysql-bkup backup --dbname database --disable-compression\n" +
|
||||||
|
|||||||
@@ -12,9 +12,8 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
var currentTime = time.Now().Format("2006/01/02 15:04:05")
|
|
||||||
|
|
||||||
func Info(msg string, args ...any) {
|
func Info(msg string, args ...any) {
|
||||||
|
var currentTime = time.Now().Format("2006/01/02 15:04:05")
|
||||||
formattedMessage := fmt.Sprintf(msg, args...)
|
formattedMessage := fmt.Sprintf(msg, args...)
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
fmt.Printf("%s INFO: %s\n", currentTime, msg)
|
fmt.Printf("%s INFO: %s\n", currentTime, msg)
|
||||||
@@ -25,6 +24,7 @@ func Info(msg string, args ...any) {
|
|||||||
|
|
||||||
// Warn warning message
|
// Warn warning message
|
||||||
func Warn(msg string, args ...any) {
|
func Warn(msg string, args ...any) {
|
||||||
|
var currentTime = time.Now().Format("2006/01/02 15:04:05")
|
||||||
formattedMessage := fmt.Sprintf(msg, args...)
|
formattedMessage := fmt.Sprintf(msg, args...)
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
fmt.Printf("%s WARN: %s\n", currentTime, msg)
|
fmt.Printf("%s WARN: %s\n", currentTime, msg)
|
||||||
@@ -33,6 +33,7 @@ func Warn(msg string, args ...any) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
func Error(msg string, args ...any) {
|
func Error(msg string, args ...any) {
|
||||||
|
var currentTime = time.Now().Format("2006/01/02 15:04:05")
|
||||||
formattedMessage := fmt.Sprintf(msg, args...)
|
formattedMessage := fmt.Sprintf(msg, args...)
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
fmt.Printf("%s ERROR: %s\n", currentTime, msg)
|
fmt.Printf("%s ERROR: %s\n", currentTime, msg)
|
||||||
@@ -41,6 +42,7 @@ func Error(msg string, args ...any) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
func Done(msg string, args ...any) {
|
func Done(msg string, args ...any) {
|
||||||
|
var currentTime = time.Now().Format("2006/01/02 15:04:05")
|
||||||
formattedMessage := fmt.Sprintf(msg, args...)
|
formattedMessage := fmt.Sprintf(msg, args...)
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
fmt.Printf("%s INFO: %s\n", currentTime, msg)
|
fmt.Printf("%s INFO: %s\n", currentTime, msg)
|
||||||
@@ -51,6 +53,7 @@ func Done(msg string, args ...any) {
|
|||||||
|
|
||||||
// Fatal logs an error message and exits the program
|
// Fatal logs an error message and exits the program
|
||||||
func Fatal(msg string, args ...any) {
|
func Fatal(msg string, args ...any) {
|
||||||
|
var currentTime = time.Now().Format("2006/01/02 15:04:05")
|
||||||
// Fatal logs an error message and exits the program.
|
// Fatal logs an error message and exits the program.
|
||||||
formattedMessage := fmt.Sprintf(msg, args...)
|
formattedMessage := fmt.Sprintf(msg, args...)
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
@@ -63,5 +66,4 @@ func Fatal(msg string, args ...any) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
os.Kill.Signal()
|
|
||||||
}
|
}
|
||||||
|
|||||||
183
utils/notification.go
Normal file
183
utils/notification.go
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
package utils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/tls"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"github.com/go-mail/mail"
|
||||||
|
"github.com/robfig/cron/v3"
|
||||||
|
"html/template"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func parseTemplate[T any](data T, fileName string) (string, error) {
|
||||||
|
// Open the file
|
||||||
|
tmpl, err := template.ParseFiles(filepath.Join(templatePath, fileName))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err = tmpl.Execute(&buf, data); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func SendEmail(subject, body string) error {
|
||||||
|
Info("Start sending email notification....")
|
||||||
|
config := loadMailConfig()
|
||||||
|
emails := strings.Split(config.MailTo, ",")
|
||||||
|
m := mail.NewMessage()
|
||||||
|
m.SetHeader("From", config.MailFrom)
|
||||||
|
m.SetHeader("To", emails...)
|
||||||
|
m.SetHeader("Subject", subject)
|
||||||
|
m.SetBody("text/html", body)
|
||||||
|
d := mail.NewDialer(config.MailHost, config.MailPort, config.MailUserName, config.MailPassword)
|
||||||
|
d.TLSConfig = &tls.Config{InsecureSkipVerify: config.SkipTls}
|
||||||
|
|
||||||
|
if err := d.DialAndSend(m); err != nil {
|
||||||
|
Error("Error could not send email : %v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
Info("Email notification has been sent")
|
||||||
|
return nil
|
||||||
|
|
||||||
|
}
|
||||||
|
func sendMessage(msg string) error {
|
||||||
|
|
||||||
|
Info("Sending Telegram notification... ")
|
||||||
|
chatId := os.Getenv("TG_CHAT_ID")
|
||||||
|
body, _ := json.Marshal(map[string]string{
|
||||||
|
"chat_id": chatId,
|
||||||
|
"text": msg,
|
||||||
|
})
|
||||||
|
url := fmt.Sprintf("%s/sendMessage", getTgUrl())
|
||||||
|
// Create an HTTP post request
|
||||||
|
request, err := http.NewRequest("POST", url, bytes.NewBuffer(body))
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
request.Header.Add("Content-Type", "application/json")
|
||||||
|
client := &http.Client{}
|
||||||
|
response, err := client.Do(request)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
code := response.StatusCode
|
||||||
|
if code == 200 {
|
||||||
|
Info("Telegram notification has been sent")
|
||||||
|
return nil
|
||||||
|
} else {
|
||||||
|
body, _ := ioutil.ReadAll(response.Body)
|
||||||
|
Error("Error could not send message, error: %s", string(body))
|
||||||
|
return fmt.Errorf("error could not send message %s", string(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
func NotifySuccess(notificationData *NotificationData) {
|
||||||
|
notificationData.BackupReference = backupReference()
|
||||||
|
var vars = []string{
|
||||||
|
"TG_TOKEN",
|
||||||
|
"TG_CHAT_ID",
|
||||||
|
}
|
||||||
|
var mailVars = []string{
|
||||||
|
"MAIL_HOST",
|
||||||
|
"MAIL_PORT",
|
||||||
|
"MAIL_USERNAME",
|
||||||
|
"MAIL_PASSWORD",
|
||||||
|
"MAIL_FROM",
|
||||||
|
"MAIL_TO",
|
||||||
|
}
|
||||||
|
|
||||||
|
//Email notification
|
||||||
|
err := CheckEnvVars(mailVars)
|
||||||
|
if err == nil {
|
||||||
|
body, err := parseTemplate(*notificationData, "email.template")
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not parse email template: %v", err)
|
||||||
|
}
|
||||||
|
err = SendEmail(fmt.Sprintf("✅ Database Backup Notification – %s", notificationData.Database), body)
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not send email: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//Telegram notification
|
||||||
|
err = CheckEnvVars(vars)
|
||||||
|
if err == nil {
|
||||||
|
message, err := parseTemplate(*notificationData, "telegram.template")
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not parse telegram template: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = sendMessage(message)
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not send Telegram message: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func NotifyError(error string) {
|
||||||
|
var vars = []string{
|
||||||
|
"TG_TOKEN",
|
||||||
|
"TG_CHAT_ID",
|
||||||
|
}
|
||||||
|
var mailVars = []string{
|
||||||
|
"MAIL_HOST",
|
||||||
|
"MAIL_PORT",
|
||||||
|
"MAIL_USERNAME",
|
||||||
|
"MAIL_PASSWORD",
|
||||||
|
"MAIL_FROM",
|
||||||
|
"MAIL_TO",
|
||||||
|
}
|
||||||
|
|
||||||
|
//Email notification
|
||||||
|
err := CheckEnvVars(mailVars)
|
||||||
|
if err == nil {
|
||||||
|
body, err := parseTemplate(ErrorMessage{
|
||||||
|
Error: error,
|
||||||
|
EndTime: time.Now().Format(TimeFormat()),
|
||||||
|
BackupReference: os.Getenv("BACKUP_REFERENCE"),
|
||||||
|
}, "email-error.template")
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not parse error template: %v", err)
|
||||||
|
}
|
||||||
|
err = SendEmail(fmt.Sprintf("🔴 Urgent: Database Backup Failure Notification"), body)
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not send email: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//Telegram notification
|
||||||
|
err = CheckEnvVars(vars)
|
||||||
|
if err == nil {
|
||||||
|
message, err := parseTemplate(ErrorMessage{
|
||||||
|
Error: error,
|
||||||
|
EndTime: time.Now().Format(TimeFormat()),
|
||||||
|
BackupReference: os.Getenv("BACKUP_REFERENCE"),
|
||||||
|
}, "telegram-error.template")
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not parse error template: %v", err)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
err = sendMessage(message)
|
||||||
|
if err != nil {
|
||||||
|
Error("Could not send telegram message: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTgUrl() string {
|
||||||
|
return fmt.Sprintf("https://api.telegram.org/bot%s", os.Getenv("TG_TOKEN"))
|
||||||
|
|
||||||
|
}
|
||||||
|
func IsValidCronExpression(cronExpr string) bool {
|
||||||
|
_, err := cron.ParseStandard(cronExpr)
|
||||||
|
return err == nil
|
||||||
|
}
|
||||||
@@ -7,19 +7,15 @@
|
|||||||
package utils
|
package utils
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/robfig/cron/v3"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"io"
|
"io"
|
||||||
"io/fs"
|
"io/fs"
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
"os"
|
||||||
"strconv"
|
"strconv"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// FileExists checks if the file does exist
|
||||||
func FileExists(filename string) bool {
|
func FileExists(filename string) bool {
|
||||||
info, err := os.Stat(filename)
|
info, err := os.Stat(filename)
|
||||||
if os.IsNotExist(err) {
|
if os.IsNotExist(err) {
|
||||||
@@ -133,14 +129,11 @@ func GetEnvVariable(envName, oldEnvName string) string {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return value
|
return value
|
||||||
}
|
}
|
||||||
Warn("%s is deprecated, please use %s instead!", oldEnvName, envName)
|
Warn("%s is deprecated, please use %s instead! ", oldEnvName, envName)
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return value
|
return value
|
||||||
}
|
}
|
||||||
// ShowHistory is currently a no-op placeholder; it performs no action.
// NOTE(review): presumably intended to display backup history — confirm
// whether this stub should be implemented or removed.
func ShowHistory() {
}
|
|
||||||
|
|
||||||
// CheckEnvVars checks if all the specified environment variables are set
|
// CheckEnvVars checks if all the specified environment variables are set
|
||||||
func CheckEnvVars(vars []string) error {
|
func CheckEnvVars(vars []string) error {
|
||||||
@@ -187,71 +180,3 @@ func GetIntEnv(envName string) int {
|
|||||||
}
|
}
|
||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
func sendMessage(msg string) {
|
|
||||||
|
|
||||||
Info("Sending notification... ")
|
|
||||||
chatId := os.Getenv("TG_CHAT_ID")
|
|
||||||
body, _ := json.Marshal(map[string]string{
|
|
||||||
"chat_id": chatId,
|
|
||||||
"text": msg,
|
|
||||||
})
|
|
||||||
url := fmt.Sprintf("%s/sendMessage", getTgUrl())
|
|
||||||
// Create an HTTP post request
|
|
||||||
request, err := http.NewRequest("POST", url, bytes.NewBuffer(body))
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
request.Header.Add("Content-Type", "application/json")
|
|
||||||
client := &http.Client{}
|
|
||||||
response, err := client.Do(request)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
code := response.StatusCode
|
|
||||||
if code == 200 {
|
|
||||||
Info("Notification has been sent")
|
|
||||||
} else {
|
|
||||||
body, _ := ioutil.ReadAll(response.Body)
|
|
||||||
Error("Message not sent, error: %s", string(body))
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
func NotifySuccess(fileName string) {
|
|
||||||
var vars = []string{
|
|
||||||
"TG_TOKEN",
|
|
||||||
"TG_CHAT_ID",
|
|
||||||
}
|
|
||||||
|
|
||||||
//Telegram notification
|
|
||||||
err := CheckEnvVars(vars)
|
|
||||||
if err == nil {
|
|
||||||
message := "[✅ MySQL Backup ]\n" +
|
|
||||||
"Database has been backed up \n" +
|
|
||||||
"Backup name is " + fileName
|
|
||||||
sendMessage(message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
func NotifyError(error string) {
|
|
||||||
var vars = []string{
|
|
||||||
"TG_TOKEN",
|
|
||||||
"TG_CHAT_ID",
|
|
||||||
}
|
|
||||||
|
|
||||||
//Telegram notification
|
|
||||||
err := CheckEnvVars(vars)
|
|
||||||
if err == nil {
|
|
||||||
message := "[🔴 MySQL Backup ]\n" +
|
|
||||||
"An error occurred during database backup \n" +
|
|
||||||
"Error: " + error
|
|
||||||
sendMessage(message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// getTgUrl returns the Telegram Bot API base URL, embedding the bot
// token read from the TG_TOKEN environment variable.
func getTgUrl() string {
	return fmt.Sprintf("https://api.telegram.org/bot%s", os.Getenv("TG_TOKEN"))
}
|
|
||||||
// IsValidCronExpression reports whether cronExpr can be parsed as a
// standard five-field cron expression.
func IsValidCronExpression(cronExpr string) bool {
	_, err := cron.ParseStandard(cronExpr)
	return err == nil
}
|
|
||||||
|
|||||||
Reference in New Issue
Block a user