chore: define gpg home directory, update scheduled deployment doc
@@ -48,7 +48,7 @@ networks:
 ### Recurring backups to S3
 
 As explained above, you need just to add AWS environment variables and specify the storage type `--storage s3`.
-In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
+In case you need to use recurring backups, you can use `--cron-expression "0 1 * * *"` flag or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below.
 
 ```yml
 services:
@@ -72,6 +72,7 @@ services:
 - AWS_REGION="us-west-2"
 - AWS_ACCESS_KEY=xxxx
 - AWS_SECRET_KEY=xxxxx
+# - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional
 ## In case you are using S3 alternative such as Minio and your Minio instance is not secured, you change it to true
 - AWS_DISABLE_SSL="false"
 # pg-bkup container must be connected to the same network with your database
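For illustration (not part of this commit), the same recurring S3 backup can be started with a one-off `docker run`. This is a hedged sketch that only reuses the flag, image, and variable names shown in the hunks above and in the environment table further down; host names, credentials, and the bucket name are placeholders.

```shell
# Sketch: daily backup to S3 at 01:00 using the new --cron-expression flag.
# All values are placeholders; AWS_* and DB_* names come from the environment variables table below.
docker run --rm --name pg-bkup \
  --network web \
  -e DB_HOST=postgres \
  -e DB_NAME=database \
  -e DB_USERNAME=username \
  -e DB_PASSWORD=password \
  -e AWS_REGION="us-west-2" \
  -e AWS_ACCESS_KEY=xxxx \
  -e AWS_SECRET_KEY=xxxxx \
  -e AWS_BUCKET_NAME=my-bucket \
  jkaninda/pg-bkup backup -d database --storage s3 --cron-expression "0 1 * * *"
```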
@@ -52,7 +52,7 @@ networks:
 ### Recurring backups to SSH remote server
 
 As explained above, you need just to add required environment variables and specify the storage type `--storage ssh`.
-You can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
+You can use `--cron-expression "* * * * *"` or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below.
 
 ```yml
 services:
@@ -63,10 +63,7 @@ services:
 # for a list of available releases.
 image: jkaninda/pg-bkup
 container_name: pg-bkup
-command:
-  - /bin/sh
-  - -c
-  - pg-bkup backup -d database --storage ssh --mode scheduled --period "0 1 * * *"
+command: backup -d database --storage ssh --cron-expression "0 1 * * *"
 volumes:
 - ./id_ed25519:/tmp/id_ed25519"
 environment:
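For the SSH variant, an equivalent `docker run` sketch (hedged, not taken from the changed files): the `SSH_*` variable names come from the environment table later in this document, the key mount mirrors the compose file above, and the remote host and paths are placeholders.

```shell
# Sketch: daily backup to an SSH remote at 01:00.
# The private key is mounted where the compose example above expects it.
docker run --rm --name pg-bkup \
  --network web \
  -v ./id_ed25519:/tmp/id_ed25519 \
  -e DB_HOST=postgres \
  -e DB_NAME=database \
  -e DB_USERNAME=username \
  -e DB_PASSWORD=password \
  -e SSH_HOST_NAME=backup.example.com \
  -e SSH_PORT=22 \
  -e SSH_USER=foo \
  -e SSH_REMOTE_PATH=/home/foo/backup \
  -e SSH_IDENTIFY_FILE=/tmp/id_ed25519 \
  jkaninda/pg-bkup backup -d database --storage ssh --cron-expression "0 1 * * *"
```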
@@ -56,7 +56,7 @@ networks:
 jkaninda/pg-bkup backup -d database_name
 ```
 
-In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
+In case you need to use recurring backups, you can use `--cron-expression "0 1 * * *"` flag or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below.
 
 ```yml
 services:
@@ -67,7 +67,7 @@ services:
 # for a list of available releases.
 image: jkaninda/pg-bkup
 container_name: pg-bkup
-#command: backup -d database --mode scheduled --period "0 1 * * *"
+command: backup -d database --cron-expression "0 1 * * *"
 volumes:
 - ./backup:/backup
 environment:
@@ -76,6 +76,7 @@ services:
 - DB_NAME=database
 - DB_USERNAME=username
 - DB_PASSWORD=password
+- BACKUP_CRON_EXPRESSION=0 1 * * *
 # pg-bkup container must be connected to the same network with your database
 networks:
 - web
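The environment-variable form added in this hunk can also be used without compose. A hedged sketch for recurring local backups, assuming only the names shown above (the backup directory and database values are placeholders):

```shell
# Sketch: recurring local backup driven by BACKUP_CRON_EXPRESSION instead of the flag.
docker run --rm --name pg-bkup \
  --network web \
  -v ./backup:/backup \
  -e DB_HOST=postgres \
  -e DB_NAME=database \
  -e DB_USERNAME=username \
  -e DB_PASSWORD=password \
  -e BACKUP_CRON_EXPRESSION="0 1 * * *" \
  jkaninda/pg-bkup backup -d database
```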
@@ -11,6 +11,9 @@ The image supports encrypting backups using GPG out of the box. In case a `GPG_P
 {: .warning }
 To restore an encrypted backup, you need to provide the same GPG passphrase used during backup process.
 
+- GPG home directory `/config/gnupg`
+- Cipher algorithm `aes256`
+-
 To decrypt manually, you need to install `gnupg`
 
 ```shell
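The shell block that follows in the source file is cut off by this hunk. For reference, manual decryption of a passphrase-encrypted backup with stock `gnupg` typically looks like the sketch below; the passphrase and file names are placeholders, and the `.gpg` extension matches the `gpgExtension` constant changed later in this commit.

```shell
# Sketch: decrypt a backup that was encrypted with a symmetric GPG passphrase (AES-256).
gpg --batch --yes \
  --pinentry-mode loopback \
  --passphrase "my-gpg-passphrase" \
  --output backup.sql.gz \
  --decrypt backup.sql.gz.gpg
```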
@@ -25,51 +25,50 @@ Backup, restore and migrate targets, schedule and retention are configured using
 | --path | | AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup` |
 | --dbname | -d | Database name |
 | --port | -p | Database port (default: 5432) |
-| --mode | -m | Execution mode. default or scheduled (default: default) |
 | --disable-compression | | Disable database backup compression |
 | --prune | | Delete old backup, default disabled |
 | --keep-last | | Delete old backup created more than specified days ago, default 7 days |
-| --period | | Crontab period for scheduled mode only. (default: "0 1 * * *") |
+| --cron-expression | | Backup cron expression, eg: (* * * * *) or @daily |
 | --help | -h | Print this help message and exit |
 | --version | -V | Print version information and exit |
 
 ## Environment variables
 
 | Name | Requirement | Description |
-|------------------------|----------------------------------------------------|------------------------------------------------------|
+|------------------------|---------------------------------------------------------------|------------------------------------------------------|
 | DB_PORT | Optional, default 5432 | Database port number |
 | DB_HOST | Required | Database host |
 | DB_NAME | Optional if it was provided from the -d flag | Database name |
 | DB_USERNAME | Required | Database user name |
 | DB_PASSWORD | Required | Database password |
 | AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
 | AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
 | AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
 | AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
 | AWS_REGION | Optional, required for S3 storage | AWS Region |
 | AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
 | FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
 | GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
-| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --period flag | Backup cron expression for docker in scheduled mode |
+| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode |
 | SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip |
 | SSH_USER | Optional, required for SSH storage | ssh remote user |
 | SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
 | SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
 | SSH_PORT | Optional, required for SSH storage | ssh remote server port |
 | SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) |
 | TARGET_DB_HOST | Optional, required for database migration | Target database host |
 | TARGET_DB_PORT | Optional, required for database migration | Target database port |
 | TARGET_DB_NAME | Optional, required for database migration | Target database name |
 | TARGET_DB_USERNAME | Optional, required for database migration | Target database username |
 | TARGET_DB_PASSWORD | Optional, required for database migration | Target database password |
-| TG_TOKEN | Optional, required for Telegram notification | Telegram token |
+| TG_TOKEN | Optional, required for Telegram notification | Telegram token (`BOT-ID:BOT-TOKEN`) |
 | TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID |
 
 ---
 ## Run in Scheduled mode
 
 This image can be run as CronJob in Kubernetes for a regular backup which makes deployment on Kubernetes easy as Kubernetes has CronJob resources.
-For Docker, you need to run it in scheduled mode by adding `--mode scheduled` flag and specify the periodical backup time by adding `--period "0 1 * * *"` flag.
+For Docker, you need to run it in scheduled mode by adding `--cron-expression "* * * * *"` flag or by defining `BACKUP_CRON_EXPRESSION=0 1 * * *` environment variable.
 
 ## Syntax of crontab (field description)
 
@@ -112,3 +111,21 @@ Easy to remember format:
 ```conf
 0 1 * * *
 ```
+## Predefined schedules
+You may use one of several pre-defined schedules in place of a cron expression.
+
+| Entry | Description | Equivalent To |
+|------------------------|--------------------------------------------|---------------|
+| @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 1 1 * |
+| @monthly | Run once a month, midnight, first of month | 0 0 1 * * |
+| @weekly | Run once a week, midnight between Sat/Sun | 0 0 * * 0 |
+| @daily (or @midnight) | Run once a day, midnight | 0 0 * * * |
+| @hourly | Run once an hour, beginning of hour | 0 * * * * |
+
+### Intervals
+You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this:
+
+@every <duration>
+where "duration" is a string accepted by time.
+
+For example, "@every 1h30m10s" would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that.
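As a usage illustration (not part of the changed files), the predefined schedules and `@every` intervals documented above should be usable wherever a cron expression is accepted; a hedged sketch with the flag introduced in this commit, using placeholder database values:

```shell
# Sketch: predefined schedule instead of a five-field cron expression.
docker run --rm --network web \
  -v ./backup:/backup \
  -e DB_HOST=postgres -e DB_USERNAME=username -e DB_PASSWORD=password \
  jkaninda/pg-bkup backup -d database --cron-expression "@daily"

# Fixed-interval form (Go duration syntax), per the "@every <duration>" section above:
#   ... --cron-expression "@every 1h30m"
```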
@@ -73,7 +73,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
 	backupRetention, _ := cmd.Flags().GetInt("keep-last")
 	prune, _ := cmd.Flags().GetBool("prune")
 	disableCompression, _ = cmd.Flags().GetBool("disable-compression")
-	executionMode, _ = cmd.Flags().GetString("mode")
+	_, _ = cmd.Flags().GetString("mode")
 	gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
 	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
 	cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
@@ -16,7 +16,7 @@ import (
 func Decrypt(inputFile string, passphrase string) error {
 	utils.Info("Decrypting backup file: %s...", inputFile)
 	//Create gpg home dir
-	err := utils.MakeDir(gpgHome)
+	err := utils.MakeDirAll(gpgHome)
 	if err != nil {
 		return err
 	}
@@ -37,7 +37,7 @@ func Decrypt(inputFile string, passphrase string) error {
 func Encrypt(inputFile string, passphrase string) error {
 	utils.Info("Encrypting backup...")
 	//Create gpg home dir
-	err := utils.MakeDir(gpgHome)
+	err := utils.MakeDirAll(gpgHome)
 	if err != nil {
 		return err
 	}
pkg/var.go
@@ -6,17 +6,15 @@
 **/
 package pkg
 
-const cronLogFile = "/var/log/pg-bkup.log"
 const tmpPath = "/tmp/backup"
-const backupCronFile = "/usr/local/bin/backup_cron.sh"
-const gpgHome = "gnupg"
+const gpgHome = "/config/gnupg"
 const algorithm = "aes256"
 const gpgExtension = "gpg"
 
 var (
 	storage = "local"
 	file = ""
-	executionMode = "default"
 	storagePath = "/backup"
 	disableCompression = false
 	encryption = false