feat: add database migration from a source to a target database

@@ -21,6 +21,8 @@ var BackupCmd = &cobra.Command{
 
 func init() {
 	//Backup
+	BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
+	BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
 	BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled")
 	BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time")
 	BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")

cmd/migrate.go  (new file, 21 lines)
@@ -0,0 +1,21 @@
+package cmd
+
+import (
+	"github.com/jkaninda/pg-bkup/pkg"
+	"github.com/jkaninda/pg-bkup/utils"
+	"github.com/spf13/cobra"
+)
+
+var MigrateCmd = &cobra.Command{
+	Use:   "migrate",
+	Short: "Migrate database from a source database to a target database",
+	Run: func(cmd *cobra.Command, args []string) {
+		if len(args) == 0 {
+			pkg.StartMigration(cmd)
+		} else {
+			utils.Fatal("Error, no argument required")
+
+		}
+
+	},
+}
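
For context, MigrateCmd takes no positional arguments (anything passed after `migrate` hits the `utils.Fatal` branch above); everything it needs comes from environment variables. A minimal invocation sketch, assuming the published image and the DB_*/SOURCE_DB_* variables documented later in this commit (network name and env file are placeholders):

```shell
# Sketch only: network name and env file are placeholders; the env file holds
# the DB_* (target) and SOURCE_DB_* (source) variables added in this commit.
docker run --rm --network your_network_name \
  --env-file your-env \
  -v $PWD/backup:/backup/ \
  jkaninda/pg-bkup migrate
```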

@@ -24,5 +24,7 @@ var RestoreCmd = &cobra.Command{
 func init() {
 	//Restore
 	RestoreCmd.PersistentFlags().StringP("file", "f", "", "File name of database")
+	RestoreCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
+	RestoreCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
 
 }

@@ -18,7 +18,6 @@ var rootCmd = &cobra.Command{
 	Example: utils.MainExample,
 	Version: appVersion,
 }
-var operation = ""
 
 // Execute adds all child commands to the root command and sets flags appropriately.
 // This is called by main.main(). It only needs to happen once to the rootCmd.
@@ -30,12 +29,10 @@ func Execute() {
 }
 
 func init() {
-	rootCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
-	rootCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
 	rootCmd.PersistentFlags().StringP("dbname", "d", "", "Database name")
 	rootCmd.PersistentFlags().IntP("port", "p", 5432, "Database port")
-	rootCmd.PersistentFlags().StringVarP(&operation, "operation", "o", "", "Set operation, for old version only")
 	rootCmd.AddCommand(VersionCmd)
 	rootCmd.AddCommand(BackupCmd)
 	rootCmd.AddCommand(RestoreCmd)
+	rootCmd.AddCommand(MigrateCmd)
 }

@@ -14,7 +14,7 @@ ENV DB_HOST=""
 ENV DB_NAME=""
 ENV DB_USERNAME=""
 ENV DB_PASSWORD=""
-ENV DB_PORT="5432"
+ENV DB_PORT=5432
 ENV STORAGE=local
 ENV AWS_S3_ENDPOINT=""
 ENV AWS_S3_BUCKET_NAME=""
@@ -30,8 +30,13 @@ ENV SSH_PASSWORD=""
 ENV SSH_HOST_NAME=""
 ENV SSH_IDENTIFY_FILE=""
 ENV SSH_PORT="22"
+ENV SOURCE_DB_HOST=""
+ENV SOURCE_DB_PORT=5432
+ENV SOURCE_DB_NAME=""
+ENV SOURCE_DB_USERNAME=""
+ENV SOURCE_DB_PASSWORD=""
 ARG DEBIAN_FRONTEND=noninteractive
-ENV VERSION="v1.2.2"
+ENV VERSION="v1.2.3"
 ENV BACKUP_CRON_EXPRESSION=""
 ENV GNUPGHOME="/tmp/gnupg"
 ARG WORKDIR="/app"
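
The five new SOURCE_DB_* defaults are empty, so they are meant to be supplied at run time alongside the existing DB_* target settings. A hedged sketch of passing them explicitly with `-e` flags (all values are placeholders):

```shell
# Sketch: placeholder values; variable names come from the Dockerfile above.
docker run --rm --network your_network_name \
  -e DB_HOST=postgres -e DB_PORT=5432 -e DB_NAME=targetdb \
  -e DB_USERNAME=targetuser -e DB_PASSWORD=password \
  -e SOURCE_DB_HOST=postgres -e SOURCE_DB_PORT=5432 -e SOURCE_DB_NAME=sourcedb \
  -e SOURCE_DB_USERNAME=sourceuser -e SOURCE_DB_PASSWORD=password \
  jkaninda/pg-bkup migrate
```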

docs/how-tos/migrate.md  (new file, 75 lines)
@@ -0,0 +1,75 @@
+---
+title: Migrate database
+layout: default
+parent: How Tos
+nav_order: 9
+---
+
+# Migrate database
+
+To migrate a database, use the `migrate` command.
+
+{: .note }
+pg-bkup has another great feature: migrating your database from a source database to a target database.
+
+Moving a database from a source to a target normally takes two operations: back up the source database, then restore that backup into the target database.
+Instead of proceeding like that, you can use the integrated `migrate` feature, which performs the migration in a single operation.
+
+
+### Docker compose
+```yml
+services:
+  pg-bkup:
+    # In production, it is advised to lock your image tag to a proper
+    # release version instead of using `latest`.
+    # Check https://github.com/jkaninda/pg-bkup/releases
+    # for a list of available releases.
+    image: jkaninda/pg-bkup
+    container_name: pg-bkup
+    command: migrate
+    volumes:
+      - ./backup:/backup
+    environment:
+      ## Target database
+      - DB_PORT=5432
+      - DB_HOST=postgres
+      - DB_NAME=database
+      - DB_USERNAME=username
+      - DB_PASSWORD=password
+      ## Source database
+      - SOURCE_DB_HOST=postgres
+      - SOURCE_DB_PORT=5432
+      - SOURCE_DB_NAME=sourcedb
+      - SOURCE_DB_USERNAME=jonas
+      - SOURCE_DB_PASSWORD=password
+    # pg-bkup container must be connected to the same network with your database
+    networks:
+      - web
+networks:
+  web:
+```
+
+### Migrate database using Docker CLI
+
+```env
+## Target database
+DB_PORT=5432
+DB_HOST=postgres
+DB_NAME=targetdb
+DB_USERNAME=targetuser
+DB_PASSWORD=password
+
+## Source database
+SOURCE_DB_HOST=postgres
+SOURCE_DB_PORT=5432
+SOURCE_DB_NAME=sourcedb
+SOURCE_DB_USERNAME=sourceuser
+SOURCE_DB_PASSWORD=password
+```
+
+```shell
+docker run --rm --network your_network_name \
+  --env-file your-env \
+  -v $PWD/backup:/backup/ \
+  jkaninda/pg-bkup migrate -d database_name
+```
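
As a point of comparison for the two operations the page mentions, here is a rough sketch of the manual backup-then-restore flow that `migrate` collapses into one step; flag names follow the backup/restore commands touched in this commit, while env files, database names, and the backup file name are placeholders:

```shell
# Step 1: back up the source database to the shared /backup volume.
docker run --rm --network your_network_name --env-file source-env \
  -v $PWD/backup:/backup/ jkaninda/pg-bkup backup -s local -d sourcedb
# Step 2: restore that backup into the target database.
docker run --rm --network your_network_name --env-file target-env \
  -v $PWD/backup:/backup/ jkaninda/pg-bkup restore -s local -d targetdb \
  -f sourcedb_20240101_120000.sql.gz
```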

@@ -17,11 +17,10 @@ import (
 )
 
 func StartBackup(cmd *cobra.Command) {
-	_, _ = cmd.Flags().GetString("operation")
 	//Set env
 	utils.SetEnv("STORAGE_PATH", storagePath)
-	utils.GetEnv(cmd, "dbname", "DB_NAME")
-	utils.GetEnv(cmd, "port", "DB_PORT")
+	//utils.GetEnv(cmd, "dbname", "DB_NAME")
+	//utils.GetEnv(cmd, "port", "DB_PORT")
 	utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
 
 	//Get flag value and set env
@@ -32,37 +31,38 @@ func StartBackup(cmd *cobra.Command) {
 	prune, _ := cmd.Flags().GetBool("prune")
 	disableCompression, _ = cmd.Flags().GetBool("disable-compression")
 	executionMode, _ = cmd.Flags().GetString("mode")
-	dbName = os.Getenv("DB_NAME")
 	gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
 	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
 
+	dbConf = getDbConfig(cmd)
+
 	//
 	if gpgPassphrase != "" {
 		encryption = true
 	}
 
 	//Generate file name
-	backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbName, time.Now().Format("20060102_150405"))
+	backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
 	if disableCompression {
-		backupFileName = fmt.Sprintf("%s_%s.sql", dbName, time.Now().Format("20060102_150405"))
+		backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
 	}
 
 	if executionMode == "default" {
 		switch storage {
 		case "s3":
-			s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
+			s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
 		case "local":
-			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
+			localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
 		case "ssh", "remote":
-			sshBackup(backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
+			sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
 		case "ftp":
 			utils.Fatal("Not supported storage type: %s", storage)
 		default:
-			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
+			localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
 		}
 
 	} else if executionMode == "scheduled" {
-		scheduledMode(storage)
+		scheduledMode(dbConf, storage)
 	} else {
 		utils.Fatal("Error, unknown execution mode!")
 	}
@@ -70,7 +70,7 @@ func StartBackup(cmd *cobra.Command) {
 }
 
 // Run in scheduled mode
-func scheduledMode(storage string) {
+func scheduledMode(db *dbConfig, storage string) {
 
 	fmt.Println()
 	fmt.Println("**********************************")
@@ -81,7 +81,7 @@ func scheduledMode(storage string) {
 	utils.Info("Storage type %s ", storage)
 
 	//Test database connexion
-	utils.TestDatabaseConnection()
+	testDatabaseConnection(db)
 
 	utils.Info("Creating backup job...")
 	CreateCrontabScript(disableCompression, storage)
@@ -120,27 +120,17 @@ func scheduledMode(storage string) {
 }
 
 // BackupDatabase backup database
-func BackupDatabase(backupFileName string, disableCompression bool) {
-	dbHost = os.Getenv("DB_HOST")
-	dbPassword = os.Getenv("DB_PASSWORD")
-	dbUserName = os.Getenv("DB_USERNAME")
-	dbName = os.Getenv("DB_NAME")
-	dbPort = os.Getenv("DB_PORT")
+func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) {
 	storagePath = os.Getenv("STORAGE_PATH")
 
 	utils.Info("Starting database backup...")
 
-	err := utils.CheckEnvVars(dbHVars)
-	if err != nil {
-		utils.Error("Please make sure all required environment variables for database are set")
-		utils.Fatal("Error checking environment variables: %s", err)
-	}
+	err := os.Setenv("PGPASSWORD", db.dbPassword)
 
-	err = os.Setenv("PGPASSWORD", dbPassword)
 	if err != nil {
 		return
 	}
-	utils.TestDatabaseConnection()
+	testDatabaseConnection(db)
 	// Backup Database database
 	utils.Info("Backing up database...")
 
@@ -148,10 +138,10 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
 	if disableCompression {
 		// Execute pg_dump
 		cmd := exec.Command("pg_dump",
-			"-h", dbHost,
-			"-p", dbPort,
-			"-U", dbUserName,
-			"-d", dbName,
+			"-h", db.dbHost,
+			"-p", db.dbPort,
+			"-U", db.dbUserName,
+			"-d", db.dbName,
 		)
 		output, err := cmd.Output()
 		if err != nil {
@@ -172,10 +162,10 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
 	} else {
 		// Execute pg_dump
 		cmd := exec.Command("pg_dump",
-			"-h", dbHost,
-			"-p", dbPort,
-			"-U", dbUserName,
-			"-d", dbName,
+			"-h", db.dbHost,
+			"-p", db.dbPort,
+			"-U", db.dbUserName,
+			"-d", db.dbName,
 		)
 		stdout, err := cmd.StdoutPipe()
 		if err != nil {
@@ -200,9 +190,9 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
 	utils.Info("Database has been backed up")
 
 }
-func localBackup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
+func localBackup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
 	utils.Info("Backup database to local storage")
-	BackupDatabase(backupFileName, disableCompression)
+	BackupDatabase(db, backupFileName, disableCompression)
 	finalFileName := backupFileName
 	if encrypt {
 		encryptBackup(backupFileName)
@@ -218,12 +208,12 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac
 	deleteTemp()
 }
 
-func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
+func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
 	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
 	s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
 	utils.Info("Backup database to s3 storage")
 	//Backup database
-	BackupDatabase(backupFileName, disableCompression)
+	BackupDatabase(db, backupFileName, disableCompression)
 	finalFileName := backupFileName
 	if encrypt {
 		encryptBackup(backupFileName)
@@ -255,10 +245,10 @@ func s3Backup(backupFileName string, disableCompression bool, prune bool, backup
 	//Delete temp
 	deleteTemp()
 }
-func sshBackup(backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
+func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
 	utils.Info("Backup database to Remote server")
 	//Backup database
-	BackupDatabase(backupFileName, disableCompression)
+	BackupDatabase(db, backupFileName, disableCompression)
 	finalFileName := backupFileName
 	if encrypt {
 		encryptBackup(backupFileName)

@@ -1,4 +1,59 @@
 package pkg
 
+import (
+	"github.com/jkaninda/pg-bkup/utils"
+	"github.com/spf13/cobra"
+	"os"
+)
+
 type Config struct {
 }
+
+type dbConfig struct {
+	dbHost     string
+	dbPort     string
+	dbName     string
+	dbUserName string
+	dbPassword string
+}
+type dbSourceConfig struct {
+	sourceDbHost     string
+	sourceDbPort     string
+	sourceDbUserName string
+	sourceDbPassword string
+	sourceDbName     string
+}
+
+func getDbConfig(cmd *cobra.Command) *dbConfig {
+	//Set env
+	utils.GetEnv(cmd, "dbname", "DB_NAME")
+	utils.GetEnv(cmd, "port", "DB_PORT")
+	dConf := dbConfig{}
+	dConf.dbHost = os.Getenv("DB_HOST")
+	dConf.dbPort = os.Getenv("DB_PORT")
+	dConf.dbName = os.Getenv("DB_NAME")
+	dConf.dbUserName = os.Getenv("DB_USERNAME")
+	dConf.dbPassword = os.Getenv("DB_PASSWORD")
+
+	err := utils.CheckEnvVars(dbHVars)
+	if err != nil {
+		utils.Error("Please make sure all required environment variables for database are set")
+		utils.Fatal("Error checking environment variables: %s", err)
+	}
+	return &dConf
+}
+func getSourceDbConfig() *dbSourceConfig {
+	sdbConfig := dbSourceConfig{}
+	sdbConfig.sourceDbHost = os.Getenv("SOURCE_DB_HOST")
+	sdbConfig.sourceDbPort = os.Getenv("SOURCE_DB_PORT")
+	sdbConfig.sourceDbName = os.Getenv("SOURCE_DB_NAME")
+	sdbConfig.sourceDbUserName = os.Getenv("SOURCE_DB_USERNAME")
+	sdbConfig.sourceDbPassword = os.Getenv("SOURCE_DB_PASSWORD")
+
+	err := utils.CheckEnvVars(sdbRVars)
+	if err != nil {
+		utils.Error("Please make sure all required environment variables for source database are set")
+		utils.Fatal("Error checking environment variables: %s", err)
+	}
+	return &sdbConfig
+}

@@ -1,8 +1,10 @@
 package pkg
 
 import (
+	"bytes"
 	"github.com/jkaninda/pg-bkup/utils"
 	"os"
+	"os/exec"
 	"path/filepath"
 	"time"
 )
@@ -94,3 +96,38 @@ func deleteTemp() {
 	utils.Info("Deleting %s ... done", tmpPath)
 	}
 }
+
+// TestDatabaseConnection tests the database connection
+func testDatabaseConnection(db *dbConfig) {
+
+	utils.Info("Connecting to %s database ...", db.dbName)
+	// Test database connection
+	query := "SELECT version();"
+
+	// Set the environment variable for the database password
+	err := os.Setenv("PGPASSWORD", db.dbPassword)
+	if err != nil {
+		return
+	}
+	// Prepare the psql command
+	cmd := exec.Command("psql",
+		"-U", db.dbUserName, // database user
+		"-d", db.dbName, // database name
+		"-h", db.dbHost, // host
+		"-p", db.dbPort, // port
+		"-c", query, // SQL command to execute
+	)
+	// Capture the output
+	var out bytes.Buffer
+	cmd.Stdout = &out
+	cmd.Stderr = &out
+
+	// Run the command and capture any errors
+	err = cmd.Run()
+	if err != nil {
+		utils.Error("Error running psql command: %v\nOutput: %s\n", err, out.String())
+		return
+	}
+	utils.Info("Successfully connected to %s database", db.dbName)
+
+}

pkg/migrate.go  (new file, 31 lines)
@@ -0,0 +1,31 @@
+package pkg
+
+import (
+	"fmt"
+	"github.com/jkaninda/pg-bkup/utils"
+	"github.com/spf13/cobra"
+	"time"
+)
+
+func StartMigration(cmd *cobra.Command) {
+	utils.Info("Starting database migration...")
+	//Get DB config
+	dbConf = getDbConfig(cmd)
+	sDbConf = getSourceDbConfig()
+
+	//Generate file name
+	backupFileName := fmt.Sprintf("%s_%s.sql", sDbConf.sourceDbName, time.Now().Format("20060102_150405"))
+	//Backup Source Database
+	newDbConfig := dbConfig{}
+	newDbConfig.dbHost = sDbConf.sourceDbHost
+	newDbConfig.dbPort = sDbConf.sourceDbPort
+	newDbConfig.dbName = sDbConf.sourceDbName
+	newDbConfig.dbUserName = sDbConf.sourceDbUserName
+	newDbConfig.dbPassword = sDbConf.sourceDbPassword
+	BackupDatabase(&newDbConfig, backupFileName, true)
+	//Restore source database into target database
+	utils.Info("Restoring [%s] database into [%s] database...", sDbConf.sourceDbName, dbConf.dbName)
+	RestoreDatabase(dbConf, backupFileName)
+	utils.Info("[%s] database has been restored into [%s] database", sDbConf.sourceDbName, dbConf.dbName)
+	utils.Info("Database migration completed!")
+}
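
Functionally, StartMigration boils down to an uncompressed dump of the source followed by a restore into the target, reusing the BackupDatabase and RestoreDatabase paths shown earlier in this diff. A rough shell equivalent, for illustration only, with all connection values taken from the SOURCE_DB_*/DB_* variables as placeholders:

```shell
# Illustration only: the Go code above does this via BackupDatabase/RestoreDatabase.
export PGPASSWORD="$SOURCE_DB_PASSWORD"
pg_dump -h "$SOURCE_DB_HOST" -p "$SOURCE_DB_PORT" \
  -U "$SOURCE_DB_USERNAME" -d "$SOURCE_DB_NAME" > /tmp/source.sql

export PGPASSWORD="$DB_PASSWORD"
psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USERNAME" \
  -d "$DB_NAME" -f /tmp/source.sql
```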

@@ -13,8 +13,6 @@ func StartRestore(cmd *cobra.Command) {
 
 	//Set env
 	utils.SetEnv("STORAGE_PATH", storagePath)
-	utils.GetEnv(cmd, "dbname", "DB_NAME")
-	utils.GetEnv(cmd, "port", "DB_PORT")
 
 	//Get flag value and set env
 	s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
@@ -23,47 +21,46 @@ func StartRestore(cmd *cobra.Command) {
 	file = utils.GetEnv(cmd, "file", "FILE_NAME")
 	executionMode, _ = cmd.Flags().GetString("mode")
 	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
+
+	dbConf = getDbConfig(cmd)
+
 	switch storage {
 	case "s3":
-		restoreFromS3(file, bucket, s3Path)
+		restoreFromS3(dbConf, file, bucket, s3Path)
 	case "local":
 		utils.Info("Restore database from local")
 		copyToTmp(storagePath, file)
-		RestoreDatabase(file)
+		RestoreDatabase(dbConf, file)
 	case "ssh":
-		restoreFromRemote(file, remotePath)
+		restoreFromRemote(dbConf, file, remotePath)
 	case "ftp":
 		utils.Fatal("Restore from FTP is not yet supported")
 	default:
 		utils.Info("Restore database from local")
-		RestoreDatabase(file)
+		copyToTmp(storagePath, file)
+		RestoreDatabase(dbConf, file)
 	}
 }
 
-func restoreFromS3(file, bucket, s3Path string) {
+func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
 	utils.Info("Restore database from s3")
 	err := utils.DownloadFile(tmpPath, file, bucket, s3Path)
 	if err != nil {
 		utils.Fatal("Error download file from s3 %s %v ", file, err)
 	}
-	RestoreDatabase(file)
+	RestoreDatabase(db, file)
 }
-func restoreFromRemote(file, remotePath string) {
+func restoreFromRemote(db *dbConfig, file, remotePath string) {
 	utils.Info("Restore database from remote server")
 	err := CopyFromRemote(file, remotePath)
 	if err != nil {
 		utils.Fatal("Error download file from remote server: %s %v", filepath.Join(remotePath, file), err)
 	}
-	RestoreDatabase(file)
+	RestoreDatabase(db, file)
 }
 
 // RestoreDatabase restore database
-func RestoreDatabase(file string) {
-	dbHost = os.Getenv("DB_HOST")
-	dbPassword = os.Getenv("DB_PASSWORD")
-	dbUserName = os.Getenv("DB_USERNAME")
-	dbName = os.Getenv("DB_NAME")
-	dbPort = os.Getenv("DB_PORT")
+func RestoreDatabase(db *dbConfig, file string) {
 	gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
 	if file == "" {
 		utils.Fatal("Error, file required")
@@ -93,17 +90,17 @@ func RestoreDatabase(file string) {
 
 	if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) {
 
-		err := os.Setenv("PGPASSWORD", dbPassword)
+		err := os.Setenv("PGPASSWORD", db.dbPassword)
 		if err != nil {
 			return
 		}
-		utils.TestDatabaseConnection()
+		testDatabaseConnection(db)
 		utils.Info("Restoring database...")
 
 		extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
 		// Restore from compressed file / .sql.gz
 		if extension == ".gz" {
-			str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME")
+			str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
 			_, err := exec.Command("bash", "-c", str).Output()
 			if err != nil {
 				utils.Fatal("Error, in restoring the database %v", err)
@@ -115,7 +112,7 @@ func RestoreDatabase(file string) {
 
 	} else if extension == ".sql" {
 		//Restore from sql file
-		str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME")
+		str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
 		_, err := exec.Command("bash", "-c", str).Output()
 		if err != nil {
 			utils.Fatal("Error in restoring the database %v", err)

pkg/var.go  (20 lines changed)
@@ -9,11 +9,11 @@ const gpgExtension = "gpg"
 var (
 	storage = "local"
 	file = ""
-	dbPassword = ""
-	dbUserName = ""
-	dbName = ""
-	dbHost = ""
-	dbPort = "5432"
+	//dbPassword = ""
+	//dbUserName = ""
+	//dbName = ""
+	//dbHost = ""
+	//dbPort = "5432"
 	executionMode = "default"
 	storagePath = "/backup"
 	disableCompression = false
@@ -27,6 +27,16 @@ var dbHVars = []string{
 	"DB_USERNAME",
 	"DB_NAME",
 }
+var sdbRVars = []string{
+	"SOURCE_DB_HOST",
+	"SOURCE_DB_PORT",
+	"SOURCE_DB_NAME",
+	"SOURCE_DB_USERNAME",
+	"SOURCE_DB_PASSWORD",
+}
+
+var dbConf *dbConfig
+var sDbConf *dbSourceConfig
 
 // sshVars Required environment variables for SSH remote server storage
 var sshVars = []string{

@@ -1,19 +1,17 @@
 package utils
 
 /*****
-* MySQL Backup & Restore
+* PostgreSQL Backup & Restore
 * @author Jonas Kaninda
 * @license MIT License <https://opensource.org/licenses/MIT>
-* @link https://github.com/jkaninda/mysql-bkup
+* @link https://github.com/jkaninda/pg-bkup
 **/
 import (
-	"bytes"
 	"fmt"
 	"github.com/spf13/cobra"
 	"io"
 	"io/fs"
 	"os"
-	"os/exec"
 )
 
 func FileExists(filename string) bool {
@@ -90,49 +88,6 @@ func IsDirEmpty(name string) (bool, error) {
 	return true, nil
 }
 
-// TestDatabaseConnection tests the database connection
-func TestDatabaseConnection() {
-	dbHost := os.Getenv("DB_HOST")
-	dbPassword := os.Getenv("DB_PASSWORD")
-	dbUserName := os.Getenv("DB_USERNAME")
-	dbName := os.Getenv("DB_NAME")
-	dbPort := os.Getenv("DB_PORT")
-
-	if os.Getenv("DB_HOST") == "" || os.Getenv("DB_NAME") == "" || os.Getenv("DB_USERNAME") == "" || os.Getenv("DB_PASSWORD") == "" {
-		Fatal("Please make sure all required database environment variables are set")
-	} else {
-		Info("Connecting to database ...")
-		// Test database connection
-		query := "SELECT version();"
-
-		// Set the environment variable for the database password
-		err := os.Setenv("PGPASSWORD", dbPassword)
-		if err != nil {
-			return
-		}
-		// Prepare the psql command
-		cmd := exec.Command("psql",
-			"-U", dbUserName, // database user
-			"-d", dbName, // database name
-			"-h", dbHost, // host
-			"-p", dbPort, // port
-			"-c", query, // SQL command to execute
-		)
-		// Capture the output
-		var out bytes.Buffer
-		cmd.Stdout = &out
-		cmd.Stderr = &out
-
-		// Run the command and capture any errors
-		err = cmd.Run()
-		if err != nil {
-			Error("Error running psql command: %v\nOutput: %s\n", err, out.String())
-			return
-		}
-		Info("Successfully connected to database")
-
-	}
-}
 func GetEnv(cmd *cobra.Command, flagName, envName string) string {
 	value, _ := cmd.Flags().GetString(flagName)
 	if value != "" {