chore: define gpg home directory, update scheduled deployment doc

Jonas Kaninda
2024-09-28 09:48:37 +02:00
parent 39890e71a9
commit 91f4a46226
8 changed files with 67 additions and 50 deletions

View File

@@ -48,7 +48,7 @@ networks:
### Recurring backups to S3
As explained above, you just need to add the AWS environment variables and specify the storage type `--storage s3`.
-In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
+In case you need recurring backups, you can use the `--cron-expression "0 1 * * *"` flag or set `BACKUP_CRON_EXPRESSION=0 1 * * *`, as described below.
```yml
services:
@@ -72,6 +72,7 @@ services:
- AWS_REGION="us-west-2"
- AWS_ACCESS_KEY=xxxx
- AWS_SECRET_KEY=xxxxx
+# - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional
## If you are using an S3 alternative such as MinIO and your instance is not secured, change this to true
- AWS_DISABLE_SSL="false"
# pg-bkup container must be connected to the same network as your database
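# A hypothetical one-off equivalent via the CLI (a sketch; the network name,
# credentials, and schedule are illustrative):
# docker run --rm --network web \
#   -e DB_HOST=postgres -e DB_USERNAME=username -e DB_PASSWORD=password \
#   -e AWS_ACCESS_KEY=xxxx -e AWS_SECRET_KEY=xxxxx -e AWS_REGION="us-west-2" \
#   jkaninda/pg-bkup backup -d database --storage s3 --cron-expression "0 1 * * *"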

View File

@@ -52,7 +52,7 @@ networks:
### Recurring backups to SSH remote server
As explained above, you just need to add the required environment variables and specify the storage type `--storage ssh`.
-You can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
+You can use the `--cron-expression "* * * * *"` flag or set `BACKUP_CRON_EXPRESSION=0 1 * * *`, as described below.
```yml
services:
@@ -63,10 +63,7 @@ services:
# for a list of available releases.
image: jkaninda/pg-bkup
container_name: pg-bkup
-command:
-  - /bin/sh
-  - -c
-  - pg-bkup backup -d database --storage ssh --mode scheduled --period "0 1 * * *"
+command: backup -d database --storage ssh --cron-expression "0 1 * * *"
volumes:
- ./id_ed25519:/tmp/id_ed25519
environment:

View File

@@ -56,7 +56,7 @@ networks:
jkaninda/pg-bkup backup -d database_name
```
-In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
+In case you need recurring backups, you can use the `--cron-expression "0 1 * * *"` flag or set `BACKUP_CRON_EXPRESSION=0 1 * * *`, as described below.
```yml
services:
@@ -67,7 +67,7 @@ services:
# for a list of available releases.
image: jkaninda/pg-bkup
container_name: pg-bkup
-#command: backup -d database --mode scheduled --period "0 1 * * *"
+command: backup -d database --cron-expression "0 1 * * *"
volumes:
- ./backup:/backup
environment:
@@ -76,6 +76,7 @@ services:
- DB_NAME=database
- DB_USERNAME=username
- DB_PASSWORD=password
+- BACKUP_CRON_EXPRESSION=0 1 * * *
# pg-bkup container must be connected to the same network as your database
networks:
- web

View File

@@ -11,6 +11,9 @@ The image supports encrypting backups using GPG out of the box. In case a `GPG_P
{: .warning }
To restore an encrypted backup, you need to provide the same GPG passphrase used during the backup process.
+- GPG home directory: `/config/gnupg`
+- Cipher algorithm: `aes256`
-
To decrypt manually, you need to install `gnupg`:
```shell
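# A sketch of manual decryption with gnupg; the file names are illustrative,
# and the passphrase must match the one used at backup time.
gpg --batch --passphrase "my-passphrase" \
  --output backup.sql.gz --decrypt backup.sql.gz.gpg
```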

View File

@@ -25,51 +25,50 @@ Backup, restore and migrate targets, schedule and retention are configured using
| --path | | AWS S3 path without file name, e.g. `/custom_path`, or SSH remote path, e.g. `/home/foo/backup` |
| --dbname | -d | Database name |
| --port | -p | Database port (default: 5432) |
-| --mode | -m | Execution mode. default or scheduled (default: default) |
| --disable-compression | | Disable database backup compression |
| --prune | | Delete old backups (disabled by default) |
| --keep-last | | Delete backups older than the specified number of days (default: 7 days) |
-| --period | | Crontab period for scheduled mode only. (default: "0 1 * * *") |
+| --cron-expression | | Backup cron expression, e.g. `* * * * *` or `@daily` |
| --help | -h | Print this help message and exit |
| --version | -V | Print version information and exit |
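Combined, the flags compose as in this sketch (the database name and retention are illustrative, and the `DB_*` credentials are assumed to be set in the environment):
```shell
# One-off backup to local storage, pruning backups older than 7 days
docker run --rm jkaninda/pg-bkup backup -d database --storage local --prune --keep-last 7
```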
## Environment variables
-| Name                   | Requirement                                        | Description                                           |
-|------------------------|----------------------------------------------------|-------------------------------------------------------|
-| DB_PORT                | Optional, default 5432                             | Database port number                                  |
-| DB_HOST                | Required                                           | Database host                                         |
-| DB_NAME                | Optional if it was provided from the -d flag       | Database name                                         |
-| DB_USERNAME            | Required                                           | Database user name                                    |
-| DB_PASSWORD            | Required                                           | Database password                                     |
-| AWS_ACCESS_KEY         | Optional, required for S3 storage                  | AWS S3 Access Key                                     |
-| AWS_SECRET_KEY         | Optional, required for S3 storage                  | AWS S3 Secret Key                                     |
-| AWS_BUCKET_NAME        | Optional, required for S3 storage                  | AWS S3 Bucket Name                                    |
-| AWS_REGION             | Optional, required for S3 storage                  | AWS Region                                            |
-| AWS_DISABLE_SSL        | Optional, required for S3 storage                  | Disable SSL                                           |
-| FILE_NAME              | Optional if it was provided from the --file flag   | Database file to restore (extensions: .sql, .sql.gz)  |
-| GPG_PASSPHRASE         | Optional, required to encrypt and restore backup   | GPG passphrase                                        |
-| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --period flag | Backup cron expression for docker in scheduled mode   |
-| SSH_HOST_NAME          | Optional, required for SSH storage                 | ssh remote hostname or ip                             |
-| SSH_USER               | Optional, required for SSH storage                 | ssh remote user                                       |
-| SSH_PASSWORD           | Optional, required for SSH storage                 | ssh remote user's password                            |
-| SSH_IDENTIFY_FILE      | Optional, required for SSH storage                 | ssh remote user's private key                         |
-| SSH_PORT               | Optional, required for SSH storage                 | ssh remote server port                                |
-| SSH_REMOTE_PATH        | Optional, required for SSH storage                 | ssh remote path (/home/toto/backup)                   |
-| TARGET_DB_HOST         | Optional, required for database migration          | Target database host                                  |
-| TARGET_DB_PORT         | Optional, required for database migration          | Target database port                                  |
-| TARGET_DB_NAME         | Optional, required for database migration          | Target database name                                  |
-| TARGET_DB_USERNAME     | Optional, required for database migration          | Target database username                              |
-| TARGET_DB_PASSWORD     | Optional, required for database migration          | Target database password                              |
-| TG_TOKEN               | Optional, required for Telegram notification       | Telegram token                                        |
-| TG_CHAT_ID             | Optional, required for Telegram notification       | Telegram Chat ID                                      |
+| Name                   | Requirement                                                    | Description                                           |
+|------------------------|----------------------------------------------------------------|-------------------------------------------------------|
+| DB_PORT                | Optional, default 5432                                         | Database port number                                  |
+| DB_HOST                | Required                                                       | Database host                                         |
+| DB_NAME                | Optional if it was provided from the -d flag                   | Database name                                         |
+| DB_USERNAME            | Required                                                       | Database user name                                    |
+| DB_PASSWORD            | Required                                                       | Database password                                     |
+| AWS_ACCESS_KEY         | Optional, required for S3 storage                              | AWS S3 Access Key                                     |
+| AWS_SECRET_KEY         | Optional, required for S3 storage                              | AWS S3 Secret Key                                     |
+| AWS_BUCKET_NAME        | Optional, required for S3 storage                              | AWS S3 Bucket Name                                    |
+| AWS_REGION             | Optional, required for S3 storage                              | AWS Region                                            |
+| AWS_DISABLE_SSL        | Optional, required for S3 storage                              | Disable SSL                                           |
+| FILE_NAME              | Optional if it was provided from the --file flag               | Database file to restore (extensions: .sql, .sql.gz)  |
+| GPG_PASSPHRASE         | Optional, required to encrypt and restore backup               | GPG passphrase                                        |
+| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag  | Backup cron expression for docker in scheduled mode   |
+| SSH_HOST_NAME          | Optional, required for SSH storage                             | ssh remote hostname or ip                             |
+| SSH_USER               | Optional, required for SSH storage                             | ssh remote user                                       |
+| SSH_PASSWORD           | Optional, required for SSH storage                             | ssh remote user's password                            |
+| SSH_IDENTIFY_FILE      | Optional, required for SSH storage                             | ssh remote user's private key                         |
+| SSH_PORT               | Optional, required for SSH storage                             | ssh remote server port                                |
+| SSH_REMOTE_PATH        | Optional, required for SSH storage                             | ssh remote path (/home/toto/backup)                   |
+| TARGET_DB_HOST         | Optional, required for database migration                      | Target database host                                  |
+| TARGET_DB_PORT         | Optional, required for database migration                      | Target database port                                  |
+| TARGET_DB_NAME         | Optional, required for database migration                      | Target database name                                  |
+| TARGET_DB_USERNAME     | Optional, required for database migration                      | Target database username                              |
+| TARGET_DB_PASSWORD     | Optional, required for database migration                      | Target database password                              |
+| TG_TOKEN               | Optional, required for Telegram notification                   | Telegram token (`BOT-ID:BOT-TOKEN`)                   |
+| TG_CHAT_ID             | Optional, required for Telegram notification                   | Telegram Chat ID                                      |
---
## Run in Scheduled mode
This image can be run as a CronJob in Kubernetes for regular backups, which makes deployment on Kubernetes straightforward since Kubernetes provides a CronJob resource.
-For Docker, you need to run it in scheduled mode by adding `--mode scheduled` flag and specify the periodical backup time by adding `--period "0 1 * * *"` flag.
+For Docker, you need to run it in scheduled mode by adding the `--cron-expression "* * * * *"` flag or by defining the `BACKUP_CRON_EXPRESSION=0 1 * * *` environment variable.
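A minimal sketch of a scheduled run with Docker (the credentials and schedule are illustrative):
```shell
# The container stays in the foreground and triggers a backup at 01:00 every day
docker run --rm --name pg-bkup \
  -e DB_HOST=postgres -e DB_USERNAME=username -e DB_PASSWORD=password \
  jkaninda/pg-bkup backup -d database --cron-expression "0 1 * * *"
```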
## Syntax of crontab (field description)
@@ -111,4 +110,22 @@ Easy to remember format:
```conf
0 1 * * *
```
## Predefined schedules
You may use one of several pre-defined schedules in place of a cron expression.
| Entry | Description | Equivalent To |
|------------------------|--------------------------------------------|---------------|
| @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 1 1 * |
| @monthly | Run once a month, midnight, first of month | 0 0 1 * * |
| @weekly | Run once a week, midnight between Sat/Sun | 0 0 * * 0 |
| @daily (or @midnight) | Run once a day, midnight | 0 0 * * * |
| @hourly | Run once an hour, beginning of hour | 0 * * * * |
### Intervals
You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this:
`@every <duration>`
where "duration" is a string accepted by `time.ParseDuration`.
For example, "@every 1h30m10s" would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that.
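In a compose file, an interval schedule plugs into the same `command` shown earlier (a sketch; the interval is illustrative):
```yml
command: backup -d database --cron-expression "@every 12h"
```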

View File

@@ -73,7 +73,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
-executionMode, _ = cmd.Flags().GetString("mode")
+_, _ = cmd.Flags().GetString("mode") // value ignored: scheduling is now driven by --cron-expression / BACKUP_CRON_EXPRESSION
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")

View File

@@ -16,7 +16,7 @@ import (
func Decrypt(inputFile string, passphrase string) error {
utils.Info("Decrypting backup file: %s...", inputFile)
//Create gpg home dir
-err := utils.MakeDir(gpgHome)
+err := utils.MakeDirAll(gpgHome)
if err != nil {
return err
}
@@ -37,7 +37,7 @@ func Decrypt(inputFile string, passphrase string) error {
func Encrypt(inputFile string, passphrase string) error {
utils.Info("Encrypting backup...")
//Create gpg home dir
-err := utils.MakeDir(gpgHome)
+err := utils.MakeDirAll(gpgHome)
if err != nil {
return err
}
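The switch from `MakeDir` to `MakeDirAll` matters because the new `gpgHome` is the nested path `/config/gnupg` (see the constants below), so parent directories must be created too. The helper itself is not part of this commit; a minimal sketch, assuming it wraps `os.MkdirAll`:

```go
package utils

import "os"

// MakeDirAll creates the directory and any missing parents.
// os.MkdirAll is a no-op if the path already exists as a directory.
// The 0700 mode keeps the GPG home private to the container user
// (an assumption; the real permission bits are not shown in this diff).
func MakeDirAll(path string) error {
	return os.MkdirAll(path, 0700)
}
```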

View File

@@ -6,17 +6,15 @@
**/
package pkg
const cronLogFile = "/var/log/pg-bkup.log"
const tmpPath = "/tmp/backup"
const backupCronFile = "/usr/local/bin/backup_cron.sh"
-const gpgHome = "gnupg"
+const gpgHome = "/config/gnupg"
const algorithm = "aes256"
const gpgExtension = "gpg"
var (
-storage = "local"
-file = ""
-executionMode = "default"
+storage = "local"
+file = ""
storagePath = "/backup"
disableCompression = false
encryption = false