Merge pull request #79 from jkaninda/develop

Develop
2024-09-28 10:27:29 +02:00
committed by GitHub
18 changed files with 217 additions and 238 deletions


@@ -30,7 +30,8 @@ func init() {
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled")
BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time")
BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated")
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days")
BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression")


@@ -36,7 +36,7 @@ ENV TARGET_DB_NAME=""
ENV TARGET_DB_USERNAME=""
ENV TARGET_DB_PASSWORD=""
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v1.2.5"
ENV VERSION="v1.2.8"
ENV BACKUP_CRON_EXPRESSION=""
ENV TG_TOKEN=""
ENV TG_CHAT_ID=""
@@ -50,7 +50,7 @@ LABEL author="Jonas Kaninda"
RUN apt-get update -qq
RUN apt install postgresql-client supervisor cron gnupg -y
RUN apt install postgresql-client cron gnupg -y
# Clear cache
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
@@ -71,7 +71,6 @@ RUN chmod +x /usr/local/bin/pg-bkup
RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup
ADD docker/supervisord.conf /etc/supervisor/supervisord.conf
# Create the backup script and make it executable
RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup backup "$@"' > /usr/local/bin/backup && \
chmod +x /usr/local/bin/backup


@@ -48,7 +48,7 @@ networks:
### Recurring backups to S3
As explained above, you just need to add the AWS environment variables and specify the storage type `--storage s3`.
In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
In case you need recurring backups, you can use the `--cron-expression "0 1 * * *"` flag or set `BACKUP_CRON_EXPRESSION=0 1 * * *`, as described below.
```yml
services:
@@ -72,6 +72,7 @@ services:
- AWS_REGION="us-west-2"
- AWS_ACCESS_KEY=xxxx
- AWS_SECRET_KEY=xxxxx
# - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional
## If you are using an S3 alternative such as Minio and your Minio instance is not secured, change this to true
- AWS_DISABLE_SSL="false"
# The pg-bkup container must be connected to the same network as your database


@@ -52,7 +52,7 @@ networks:
### Recurring backups to SSH remote server
As explained above, you just need to add the required environment variables and specify the storage type `--storage ssh`.
You can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
You can use the `--cron-expression "* * * * *"` flag or set `BACKUP_CRON_EXPRESSION=0 1 * * *`, as described below.
```yml
services:
@@ -63,10 +63,7 @@ services:
# for a list of available releases.
image: jkaninda/pg-bkup
container_name: pg-bkup
command:
- /bin/sh
- -c
- pg-bkup backup -d database --storage ssh --mode scheduled --period "0 1 * * *"
command: backup -d database --storage ssh --cron-expression "0 1 * * *"
volumes:
- ./id_ed25519:/tmp/id_ed25519"
environment:


@@ -56,7 +56,7 @@ networks:
jkaninda/pg-bkup backup -d database_name
```
In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
In case you need recurring backups, you can use the `--cron-expression "0 1 * * *"` flag or set `BACKUP_CRON_EXPRESSION=0 1 * * *`, as described below.
```yml
services:
@@ -67,7 +67,7 @@ services:
# for a list of available releases.
image: jkaninda/pg-bkup
container_name: pg-bkup
#command: backup -d database --mode scheduled --period "0 1 * * *"
command: backup -d database --cron-expression "0 1 * * *"
volumes:
- ./backup:/backup
environment:
@@ -76,6 +76,7 @@ services:
- DB_NAME=database
- DB_USERNAME=username
- DB_PASSWORD=password
- BACKUP_CRON_EXPRESSION=0 1 * * *
# The pg-bkup container must be connected to the same network as your database
networks:
- web


@@ -11,6 +11,9 @@ The image supports encrypting backups using GPG out of the box. In case a `GPG_P
{: .warning }
To restore an encrypted backup, you need to provide the same GPG passphrase used during the backup process.
- GPG home directory `/config/gnupg`
- Cipher algorithm `aes256`
To decrypt manually, you need to install `gnupg`
```shell
# Example only; file names and the passphrase are placeholders:
gpg --batch --passphrase "my-passphrase" --output backup.sql.gz --decrypt backup.sql.gz.gpg
```


@@ -25,51 +25,50 @@ Backup, restore and migrate targets, schedule and retention are configured using
| --path | | AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup` |
| --dbname | -d | Database name |
| --port | -p | Database port (default: 5432) |
| --mode | -m | Execution mode. default or scheduled (default: default) |
| --disable-compression | | Disable database backup compression |
| --prune | | Delete old backup, default disabled |
| --keep-last | | Delete old backup created more than specified days ago, default 7 days |
| --period | | Crontab period for scheduled mode only. (default: "0 1 * * *") |
| --cron-expression | | Backup cron expression, eg: (* * * * *) or @daily |
| --help | -h | Print this help message and exit |
| --version | -V | Print version information and exit |
## Environment variables
| Name | Requirement | Description |
|------------------------|----------------------------------------------------|------------------------------------------------------|
| DB_PORT | Optional, default 5432 | Database port number |
| DB_HOST | Required | Database host |
| DB_NAME | Optional if it was provided from the -d flag | Database name |
| DB_USERNAME | Required | Database user name |
| DB_PASSWORD | Required | Database password |
| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
| AWS_REGION | Optional, required for S3 storage | AWS Region |
| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --period flag | Backup cron expression for docker in scheduled mode |
| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip |
| SSH_USER | Optional, required for SSH storage | ssh remote user |
| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
| SSH_PORT | Optional, required for SSH storage | ssh remote server port |
| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) |
| TARGET_DB_HOST | Optional, required for database migration | Target database host |
| TARGET_DB_PORT | Optional, required for database migration | Target database port |
| TARGET_DB_NAME | Optional, required for database migration | Target database name |
| TARGET_DB_USERNAME | Optional, required for database migration | Target database username |
| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password |
| TG_TOKEN | Optional, required for Telegram notification | Telegram token |
| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID |
| Name | Requirement | Description |
|------------------------|---------------------------------------------------------------|------------------------------------------------------|
| DB_PORT | Optional, default 5432 | Database port number |
| DB_HOST | Required | Database host |
| DB_NAME | Optional if it was provided from the -d flag | Database name |
| DB_USERNAME | Required | Database user name |
| DB_PASSWORD | Required | Database password |
| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
| AWS_REGION | Optional, required for S3 storage | AWS Region |
| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode |
| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip |
| SSH_USER | Optional, required for SSH storage | ssh remote user |
| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
| SSH_PORT | Optional, required for SSH storage | ssh remote server port |
| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) |
| TARGET_DB_HOST | Optional, required for database migration | Target database host |
| TARGET_DB_PORT | Optional, required for database migration | Target database port |
| TARGET_DB_NAME | Optional, required for database migration | Target database name |
| TARGET_DB_USERNAME | Optional, required for database migration | Target database username |
| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password |
| TG_TOKEN | Optional, required for Telegram notification | Telegram token (`BOT-ID:BOT-TOKEN`) |
| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID |
---
## Run in Scheduled mode
This image can be run as a CronJob in Kubernetes for regular backups, which makes deployment on Kubernetes straightforward since Kubernetes provides a native CronJob resource.
For Docker, you need to run it in scheduled mode by adding `--mode scheduled` flag and specify the periodical backup time by adding `--period "0 1 * * *"` flag.
For Docker, you need to run it in scheduled mode by adding the `--cron-expression "* * * * *"` flag or by defining the `BACKUP_CRON_EXPRESSION=0 1 * * *` environment variable.
## Syntax of crontab (field description)
@@ -111,4 +110,22 @@ Easy to remember format:
```conf
0 1 * * *
```
## Predefined schedules
You may use one of several pre-defined schedules in place of a cron expression.
| Entry | Description | Equivalent To |
|------------------------|--------------------------------------------|---------------|
| @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 1 1 * |
| @monthly | Run once a month, midnight, first of month | 0 0 1 * * |
| @weekly | Run once a week, midnight between Sat/Sun | 0 0 * * 0 |
| @daily (or @midnight) | Run once a day, midnight | 0 0 * * * |
| @hourly | Run once an hour, beginning of hour | 0 * * * * |
### Intervals
You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this:
@every <duration>
where "duration" is a string accepted by time.
For example, "@every 1h30m10s" would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that.

go.mod

@@ -12,9 +12,9 @@ require (
github.com/bramvdbogaerde/go-scp v1.5.0 // indirect
github.com/hpcloud/tail v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
golang.org/x/crypto v0.18.0 // indirect
golang.org/x/sys v0.22.0 // indirect
gopkg.in/fsnotify.v1 v1.4.7 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
)

go.sum

@@ -29,6 +29,8 @@ github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEp
github.com/minio/minio-go/v7 v7.0.74 h1:fTo/XlPBTSpo3BAMshlwKL5RspXRv9us5UeHEGYCFe0=
github.com/minio/minio-go/v7 v7.0.74/go.mod h1:qydcVzV8Hqtj1VtEocfxbmVFa2siu6HGa+LDEPogjD8=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=


@@ -8,8 +8,8 @@ package pkg
import (
"fmt"
"github.com/hpcloud/tail"
"github.com/jkaninda/pg-bkup/utils"
"github.com/robfig/cron/v3"
"github.com/spf13/cobra"
"log"
"os"
@@ -20,104 +20,67 @@ import (
func StartBackup(cmd *cobra.Command) {
intro()
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
dbConf = initDbConfig(cmd)
//Initialize backup configs
config := initBackupConfig(cmd)
//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
executionMode, _ = cmd.Flags().GetString("mode")
gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
dbConf = getDbConfig(cmd)
//
if gpgPassphrase != "" {
encryption = true
}
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
if disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
}
if executionMode == "default" {
switch storage {
case "s3":
s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "local":
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "ssh", "remote":
sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", storage)
default:
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
}
} else if executionMode == "scheduled" {
scheduledMode(dbConf, storage)
if config.cronExpression == "" {
BackupTask(dbConf, config)
} else {
utils.Fatal("Error, unknown execution mode!")
if utils.IsValidCronExpression(config.cronExpression) {
scheduledMode(dbConf, config)
} else {
utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
}
}
}
// Run in scheduled mode
func scheduledMode(db *dbConfig, storage string) {
fmt.Println()
fmt.Println("**********************************")
fmt.Println(" Starting PostgreSQL Bkup... ")
fmt.Println("***********************************")
func scheduledMode(db *dbConfig, config *BackupConfig) {
utils.Info("Running in Scheduled mode")
utils.Info("Execution period %s ", os.Getenv("BACKUP_CRON_EXPRESSION"))
utils.Info("Storage type %s ", storage)
utils.Info("Backup cron expression: %s", config.cronExpression)
utils.Info("Storage type %s ", config.storage)
//Test database connection
testDatabaseConnection(db)
utils.Info("Creating backup job...")
CreateCrontabScript(disableCompression, storage)
utils.Info("Creating cron job...")
// Create a new cron instance
c := cron.New()
supervisorConfig := "/etc/supervisor/supervisord.conf"
// Start Supervisor
cmd := exec.Command("supervisord", "-c", supervisorConfig)
err := cmd.Start()
_, err := c.AddFunc(config.cronExpression, func() {
BackupTask(db, config)
})
if err != nil {
utils.Fatal("Failed to start supervisord: %v", err)
return
}
utils.Info("Backup job started")
defer func() {
if err := cmd.Process.Kill(); err != nil {
utils.Info("Failed to kill supervisord process: %v", err)
} else {
utils.Info("Supervisor stopped.")
}
}()
if _, err := os.Stat(cronLogFile); os.IsNotExist(err) {
utils.Fatal(fmt.Sprintf("Log file %s does not exist.", cronLogFile))
// Start the cron scheduler
c.Start()
utils.Info("Creating cron job...done")
defer c.Stop()
select {}
}
func BackupTask(db *dbConfig, config *BackupConfig) {
utils.Info("Starting backup task...")
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20240102_150405"))
if config.disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20240102_150405"))
}
t, err := tail.TailFile(cronLogFile, tail.Config{Follow: true})
if err != nil {
utils.Fatal("Failed to tail file: %v", err)
config.backupFileName = backupFileName
switch config.storage {
case "s3":
s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "local":
localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "ssh", "remote":
sshBackup(db, config.backupFileName, config.remotePath, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", config.storage)
default:
localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
}
// Read and print new lines from the log file
for line := range t.Lines {
fmt.Println(line.Text)
}
}
func intro() {
utils.Info("Starting PostgreSQL Backup...")


@@ -33,8 +33,19 @@ type TgConfig struct {
Token string
ChatId string
}
type BackupConfig struct {
backupFileName string
backupRetention int
disableCompression bool
prune bool
encryption bool
remotePath string
gpqPassphrase string
storage string
cronExpression string
}
func getDbConfig(cmd *cobra.Command) *dbConfig {
func initDbConfig(cmd *cobra.Command) *dbConfig {
//Set env
utils.GetEnv(cmd, "dbname", "DB_NAME")
dConf := dbConfig{}
@@ -51,7 +62,71 @@ func getDbConfig(cmd *cobra.Command) *dbConfig {
}
return &dConf
}
func getTargetDbConfig() *targetDbConfig {
func initBackupConfig(cmd *cobra.Command) *BackupConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
_, _ = cmd.Flags().GetString("mode")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
if gpqPassphrase != "" {
encryption = true
}
//Initialize backup configs
config := BackupConfig{}
config.backupRetention = backupRetention
config.disableCompression = disableCompression
config.prune = prune
config.storage = storage
config.encryption = encryption
config.remotePath = remotePath
config.gpqPassphrase = gpqPassphrase
config.cronExpression = cronExpression
return &config
}
type RestoreConfig struct {
s3Path string
remotePath string
storage string
file string
bucket string
gpqPassphrase string
}
func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
_, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
//Initialize restore configs
rConfig := RestoreConfig{}
rConfig.s3Path = s3Path
rConfig.remotePath = remotePath
rConfig.storage = storage
rConfig.bucket = bucket
rConfig.file = file
rConfig.gpqPassphrase = gpqPassphrase
return &rConfig
}
func initTargetDbConfig() *targetDbConfig {
tdbConfig := targetDbConfig{}
tdbConfig.targetDbHost = os.Getenv("TARGET_DB_HOST")
tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT")
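
One pattern worth calling out: `utils.GetEnv(cmd, flag, envVar)` is used throughout these config initializers to bridge flags and environment variables. Judging from how its results are used, it appears to copy a set flag into the named environment variable and return the resulting value; that is an assumption, the real helper may differ. A standalone approximation:

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// getEnv approximates the repo's utils.GetEnv as read from the diff
// (an assumption, not the actual implementation): if the flag was set,
// mirror its value into the environment variable, then return whatever
// that variable now holds.
func getEnv(cmd *cobra.Command, flag, envVar string) string {
	if v, _ := cmd.Flags().GetString(flag); v != "" {
		os.Setenv(envVar, v)
	}
	return os.Getenv(envVar)
}

func main() {
	cmd := &cobra.Command{Use: "backup"}
	cmd.Flags().String("cron-expression", "", "Backup cron expression")
	_ = cmd.Flags().Set("cron-expression", "0 1 * * *")
	fmt.Println(getEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")) // "0 1 * * *"
}
```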


@@ -16,7 +16,7 @@ import (
func Decrypt(inputFile string, passphrase string) error {
utils.Info("Decrypting backup file: %s...", inputFile)
//Create gpg home dir
err := utils.MakeDir(gpgHome)
err := utils.MakeDirAll(gpgHome)
if err != nil {
return err
}
@@ -37,7 +37,7 @@ func Decrypt(inputFile string, passphrase string) error {
func Encrypt(inputFile string, passphrase string) error {
utils.Info("Encrypting backup...")
//Create gpg home dir
err := utils.MakeDir(gpgHome)
err := utils.MakeDirAll(gpgHome)
if err != nil {
return err
}


@@ -17,8 +17,8 @@ func StartMigration(cmd *cobra.Command) {
intro()
utils.Info("Starting database migration...")
//Get DB config
dbConf = getDbConfig(cmd)
targetDbConf = getTargetDbConfig()
dbConf = initDbConfig(cmd)
targetDbConf = initTargetDbConfig()
//Defining the target database variables
newDbConfig := dbConfig{}


@@ -17,34 +17,24 @@ import (
func StartRestore(cmd *cobra.Command) {
intro()
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
dbConf = initDbConfig(cmd)
restoreConf := initRestoreConfig(cmd)
//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
executionMode, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
dbConf = getDbConfig(cmd)
switch storage {
switch restoreConf.storage {
case "s3":
restoreFromS3(dbConf, file, bucket, s3Path)
restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path)
case "local":
utils.Info("Restore database from local")
copyToTmp(storagePath, file)
RestoreDatabase(dbConf, file)
copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, restoreConf.file)
case "ssh":
restoreFromRemote(dbConf, file, remotePath)
restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath)
case "ftp":
utils.Fatal("Restore from FTP is not yet supported")
default:
utils.Info("Restore database from local")
copyToTmp(storagePath, file)
RestoreDatabase(dbConf, file)
copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, restoreConf.file)
}
}


@@ -1,73 +0,0 @@
package pkg
// Package pkg /
/*****
@author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
import (
"fmt"
"github.com/jkaninda/pg-bkup/utils"
"os"
"os/exec"
)
func CreateCrontabScript(disableCompression bool, storage string) {
//task := "/usr/local/bin/backup_cron.sh"
touchCmd := exec.Command("touch", backupCronFile)
if err := touchCmd.Run(); err != nil {
utils.Fatal("Error creating file %s: %v\n", backupCronFile, err)
}
var disableC = ""
if disableCompression {
disableC = "--disable-compression"
}
var scriptContent string
scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
set -e
/usr/local/bin/pg-bkup backup --dbname %s --storage %s %v
`, os.Getenv("DB_NAME"), storage, disableC)
if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil {
utils.Fatal("Error writing to %s: %v\n", backupCronFile, err)
}
chmodCmd := exec.Command("chmod", "+x", "/usr/local/bin/backup_cron.sh")
if err := chmodCmd.Run(); err != nil {
utils.Fatal("Error changing permissions of %s: %v\n", backupCronFile, err)
}
lnCmd := exec.Command("ln", "-s", "/usr/local/bin/backup_cron.sh", "/usr/local/bin/backup_cron")
if err := lnCmd.Run(); err != nil {
utils.Fatal("Error creating symbolic link: %v\n", err)
}
touchLogCmd := exec.Command("touch", cronLogFile)
if err := touchLogCmd.Run(); err != nil {
utils.Fatal("Error creating file %s: %v\n", cronLogFile, err)
}
cronJob := "/etc/cron.d/backup_cron"
touchCronCmd := exec.Command("touch", cronJob)
if err := touchCronCmd.Run(); err != nil {
utils.Fatal("Error creating file %s: %v\n", cronJob, err)
}
cronContent := fmt.Sprintf(`%s root exec /bin/bash -c ". /run/supervisord.env; /usr/local/bin/backup_cron.sh >> %s"
`, os.Getenv("BACKUP_CRON_EXPRESSION"), cronLogFile)
if err := utils.WriteToFile(cronJob, cronContent); err != nil {
utils.Fatal("Error writing to %s: %v\n", cronJob, err)
}
utils.ChangePermission("/etc/cron.d/backup_cron", 0644)
crontabCmd := exec.Command("crontab", "/etc/cron.d/backup_cron")
if err := crontabCmd.Run(); err != nil {
utils.Fatal("Error updating crontab: ", err)
}
utils.Info("Backup job created.")
}


@@ -6,17 +6,15 @@
**/
package pkg
const cronLogFile = "/var/log/pg-bkup.log"
const tmpPath = "/tmp/backup"
const backupCronFile = "/usr/local/bin/backup_cron.sh"
const gpgHome = "gnupg"
const gpgHome = "/config/gnupg"
const algorithm = "aes256"
const gpgExtension = "gpg"
var (
storage = "local"
file = ""
executionMode = "default"
storage = "local"
file = ""
storagePath = "/backup"
disableCompression = false
encryption = false


@@ -7,10 +7,10 @@
package utils
const RestoreExample = "pg-bkup restore --dbname database --file db_20231219_022941.sql.gz\n" +
"bkup restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz"
"restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz"
const BackupExample = "pg-bkup backup --dbname database --disable-compression\n" +
"pg-bkup backup --dbname database --storage s3 --path /custom-path --disable-compression"
"backup --dbname database --storage s3 --path /custom-path --disable-compression"
const MainExample = "pg-bkup backup --dbname database --disable-compression\n" +
"pg-bkup backup --dbname database --storage s3 --path /custom-path\n" +
"pg-bkup restore --dbname database --file db_20231219_022941.sql.gz"
"backup --dbname database --storage s3 --path /custom-path\n" +
"restore --dbname database --file db_20231219_022941.sql.gz"


@@ -10,6 +10,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"github.com/robfig/cron/v3"
"github.com/spf13/cobra"
"io"
"io/fs"
@@ -222,7 +223,7 @@ func NotifySuccess(fileName string) {
//Telegram notification
err := CheckEnvVars(vars)
if err == nil {
message := "PostgreSQL Backup \n" +
message := "[✅ PostgreSQL Backup ]\n" +
"Database has been backed up \n" +
"Backup name is " + fileName
sendMessage(message)
@@ -237,7 +238,7 @@ func NotifyError(error string) {
//Telegram notification
err := CheckEnvVars(vars)
if err == nil {
message := "PostgreSQL Backup \n" +
message := "[🔴 PostgreSQL Backup ]\n" +
"An error occurred during database backup \n" +
"Error: " + error
sendMessage(message)
@@ -248,3 +249,7 @@ func getTgUrl() string {
return fmt.Sprintf("https://api.telegram.org/bot%s", os.Getenv("TG_TOKEN"))
}
func IsValidCronExpression(cronExpr string) bool {
_, err := cron.ParseStandard(cronExpr)
return err == nil
}
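
`cron.ParseStandard` accepts the standard five-field format plus the `@` descriptors documented earlier, so this validator covers all the forms shown in the docs changes above. A quick standalone check (the sample expressions are illustrative):

```go
package main

import (
	"fmt"

	"github.com/robfig/cron/v3"
)

// isValid mirrors utils.IsValidCronExpression from the diff above.
func isValid(expr string) bool {
	_, err := cron.ParseStandard(expr)
	return err == nil
}

func main() {
	for _, expr := range []string{"0 1 * * *", "@daily", "@every 1h30m", "61 25 * * *"} {
		fmt.Printf("%q valid=%v\n", expr, isValid(expr)) // the last one has out-of-range fields, hence false
	}
}
```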