feat: add database migration from a source to a target database

fix: GPG encryption permission warning; update Kubernetes deployment example
Jonas Kaninda
2024-08-30 19:58:12 +02:00
parent 8fb008151c
commit 662b73579d
18 changed files with 497 additions and 157 deletions
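The new migration path is configured entirely through environment variables: the target database keeps the existing DB_* variables (checked against dbHVars), while the source database uses the new SOURCE_DB_* variables (checked against sdbRVars, added in pkg/var.go below). A minimal illustration of that environment, with placeholder hosts, names, and credentials only:

```go
// Illustration only (not part of this commit): the environment a migration run
// expects, mirroring dbHVars (target) and sdbRVars (source) from pkg/var.go.
// All values below are placeholders.
package main

import "os"

func main() {
	// Target database, read by getDbConfig via the DB_* variables.
	os.Setenv("DB_HOST", "target-mysql")
	os.Setenv("DB_PORT", "3306")
	os.Setenv("DB_NAME", "targetdb")
	os.Setenv("DB_USERNAME", "user")
	os.Setenv("DB_PASSWORD", "password")

	// Source database, read by getSourceDbConfig via the SOURCE_DB_* variables.
	os.Setenv("SOURCE_DB_HOST", "source-mysql")
	os.Setenv("SOURCE_DB_PORT", "3306")
	os.Setenv("SOURCE_DB_NAME", "sourcedb")
	os.Setenv("SOURCE_DB_USERNAME", "user")
	os.Setenv("SOURCE_DB_PASSWORD", "password")
}
```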

View File

@@ -32,37 +32,38 @@ func StartBackup(cmd *cobra.Command) {
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
executionMode, _ = cmd.Flags().GetString("mode")
dbName = os.Getenv("DB_NAME")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
dbConf = getDbConfig(cmd)
//
if gpqPassphrase != "" {
encryption = true
}
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbName, time.Now().Format("20060102_150405"))
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
if disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", dbName, time.Now().Format("20060102_150405"))
backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
}
if executionMode == "default" {
switch storage {
case "s3":
s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "local":
localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "ssh", "remote":
sshBackup(backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", storage)
default:
localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
}
} else if executionMode == "scheduled" {
scheduledMode(storage)
scheduledMode(dbConf, storage)
} else {
utils.Fatal("Error, unknown execution mode!")
}
@@ -70,7 +71,7 @@ func StartBackup(cmd *cobra.Command) {
}
// Run in scheduled mode
func scheduledMode(storage string) {
func scheduledMode(db *dbConfig, storage string) {
fmt.Println()
fmt.Println("**********************************")
@@ -81,7 +82,7 @@ func scheduledMode(storage string) {
utils.Info("Storage type %s ", storage)
//Test database connection
utils.TestDatabaseConnection()
testDatabaseConnection(db)
utils.Info("Creating backup job...")
CreateCrontabScript(disableCompression, storage)
@@ -117,12 +118,7 @@ func scheduledMode(storage string) {
}
// BackupDatabase backs up the database
func BackupDatabase(backupFileName string, disableCompression bool) {
dbHost = os.Getenv("DB_HOST")
dbPassword = os.Getenv("DB_PASSWORD")
dbUserName = os.Getenv("DB_USERNAME")
dbName = os.Getenv("DB_NAME")
dbPort = os.Getenv("DB_PORT")
func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) {
storagePath = os.Getenv("STORAGE_PATH")
err := utils.CheckEnvVars(dbHVars)
@@ -132,7 +128,7 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
}
utils.Info("Starting database backup...")
utils.TestDatabaseConnection()
testDatabaseConnection(db)
// Backup database
utils.Info("Backing up database...")
@@ -140,11 +136,11 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
if disableCompression {
// Execute mysqldump
cmd := exec.Command("mysqldump",
"-h", dbHost,
"-P", dbPort,
"-u", dbUserName,
"--password="+dbPassword,
dbName,
"-h", db.dbHost,
"-P", db.dbPort,
"-u", db.dbUserName,
"--password="+db.dbPassword,
db.dbName,
)
output, err := cmd.Output()
if err != nil {
@@ -166,7 +162,7 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
} else {
// Execute mysqldump
cmd := exec.Command("mysqldump", "-h", dbHost, "-P", dbPort, "-u", dbUserName, "--password="+dbPassword, dbName)
cmd := exec.Command("mysqldump", "-h", db.dbHost, "-P", db.dbPort, "-u", db.dbUserName, "--password="+db.dbPassword, db.dbName)
stdout, err := cmd.StdoutPipe()
if err != nil {
log.Fatal(err)
@@ -189,9 +185,9 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
}
}
func localBackup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
func localBackup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
utils.Info("Backup database to local storage")
BackupDatabase(backupFileName, disableCompression)
BackupDatabase(db, backupFileName, disableCompression)
finalFileName := backupFileName
if encrypt {
encryptBackup(backupFileName)
@@ -207,12 +203,12 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac
deleteTemp()
}
func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
utils.Info("Backup database to s3 storage")
//Backup database
BackupDatabase(backupFileName, disableCompression)
BackupDatabase(db, backupFileName, disableCompression)
finalFileName := backupFileName
if encrypt {
encryptBackup(backupFileName)
@@ -243,10 +239,10 @@ func s3Backup(backupFileName string, disableCompression bool, prune bool, backup
//Delete temp
deleteTemp()
}
func sshBackup(backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
utils.Info("Backup database to Remote server")
//Backup database
BackupDatabase(backupFileName, disableCompression)
BackupDatabase(db, backupFileName, disableCompression)
finalFileName := backupFileName
if encrypt {
encryptBackup(backupFileName)

View File

@@ -1,4 +1,59 @@
package pkg
import (
"github.com/jkaninda/mysql-bkup/utils"
"github.com/spf13/cobra"
"os"
)
type Config struct {
}
type dbConfig struct {
dbHost string
dbPort string
dbName string
dbUserName string
dbPassword string
}
type dbSourceConfig struct {
sourceDbHost string
sourceDbPort string
sourceDbUserName string
sourceDbPassword string
sourceDbName string
}
func getDbConfig(cmd *cobra.Command) *dbConfig {
//Set env
utils.GetEnv(cmd, "dbname", "DB_NAME")
utils.GetEnv(cmd, "port", "DB_PORT")
dConf := dbConfig{}
dConf.dbHost = os.Getenv("DB_HOST")
dConf.dbPort = os.Getenv("DB_PORT")
dConf.dbName = os.Getenv("DB_NAME")
dConf.dbUserName = os.Getenv("DB_USERNAME")
dConf.dbPassword = os.Getenv("DB_PASSWORD")
err := utils.CheckEnvVars(dbHVars)
if err != nil {
utils.Error("Please make sure all required environment variables for database are set")
utils.Fatal("Error checking environment variables: %s", err)
}
return &dConf
}
func getSourceDbConfig() *dbSourceConfig {
sdbConfig := dbSourceConfig{}
sdbConfig.sourceDbHost = os.Getenv("SOURCE_DB_HOST")
sdbConfig.sourceDbPort = os.Getenv("SOURCE_DB_PORT")
sdbConfig.sourceDbName = os.Getenv("SOURCE_DB_NAME")
sdbConfig.sourceDbUserName = os.Getenv("SOURCE_DB_USERNAME")
sdbConfig.sourceDbPassword = os.Getenv("SOURCE_DB_PASSWORD")
err := utils.CheckEnvVars(sdbRVars)
if err != nil {
utils.Error("Please make sure all required environment variables for source database are set")
utils.Fatal("Error checking environment variables: %s", err)
}
return &sdbConfig
}

View File

@@ -9,11 +9,17 @@ import (
func Decrypt(inputFile string, passphrase string) error {
utils.Info("Decrypting backup file: " + inputFile + " ...")
//Create gpg home dir
err := utils.MakeDir(gpgHome)
if err != nil {
return err
}
utils.SetEnv("GNUPGHOME", gpgHome)
cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--output", RemoveLastExtension(inputFile), "--decrypt", inputFile)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
err := cmd.Run()
err = cmd.Run()
if err != nil {
return err
}
@@ -24,11 +30,17 @@ func Decrypt(inputFile string, passphrase string) error {
func Encrypt(inputFile string, passphrase string) error {
utils.Info("Encrypting backup...")
//Create gpg home dir
err := utils.MakeDir(gpgHome)
if err != nil {
return err
}
utils.SetEnv("GNUPGHOME", gpgHome)
cmd := exec.Command("gpg", "--batch", "--passphrase", passphrase, "--symmetric", "--cipher-algo", algorithm, inputFile)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
err := cmd.Run()
err = cmd.Run()
if err != nil {
return err
}

View File

@@ -1,9 +1,11 @@
package pkg
import (
"bytes"
"fmt"
"github.com/jkaninda/mysql-bkup/utils"
"os"
"os/exec"
"path/filepath"
"time"
)
@@ -96,3 +98,24 @@ func deleteTemp() {
utils.Info("Deleting %s ... done", tmpPath)
}
}
// testDatabaseConnection tests the database connection
func testDatabaseConnection(db *dbConfig) {
utils.Info("Connecting to %s database ...", db.dbName)
cmd := exec.Command("mysql", "-h", db.dbHost, "-P", db.dbPort, "-u", db.dbUserName, "--password="+db.dbPassword, db.dbName, "-e", "quit")
// Capture the output
var out bytes.Buffer
cmd.Stdout = &out
cmd.Stderr = &out
err := cmd.Run()
if err != nil {
utils.Error("Error testing database connection: %v\nOutput: %s", err, out.String())
os.Exit(1)
}
utils.Info("Successfully connected to %s database", db.dbName)
}
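The new testDatabaseConnection helper shells out to the mysql client binary and exits the process on failure. For reference only, the same check could be expressed with database/sql; this sketch is not part of the commit, uses simplified error handling, and assumes the github.com/go-sql-driver/mysql driver is available as a dependency:

```go
// Sketch only (not in this commit): an equivalent connectivity check using
// database/sql instead of invoking the mysql client binary. Assumes the
// github.com/go-sql-driver/mysql driver is added to go.mod.
package pkg

import (
	"database/sql"
	"fmt"

	_ "github.com/go-sql-driver/mysql"
)

func pingDatabase(db *dbConfig) error {
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s",
		db.dbUserName, db.dbPassword, db.dbHost, db.dbPort, db.dbName)
	conn, err := sql.Open("mysql", dsn)
	if err != nil {
		return err
	}
	defer conn.Close()
	// Ping opens a real connection, so bad credentials or an unreachable
	// host surface here as an error.
	return conn.Ping()
}
```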

pkg/migrate.go (new file, 31 additions)
View File

@@ -0,0 +1,31 @@
package pkg
import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils"
"github.com/spf13/cobra"
"time"
)
func StartMigration(cmd *cobra.Command) {
utils.Info("Starting database migration...")
//Get DB config
dbConf = getDbConfig(cmd)
sDbConf = getSourceDbConfig()
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql", sDbConf.sourceDbName, time.Now().Format("20060102_150405"))
//Backup Source Database
newDbConfig := dbConfig{}
newDbConfig.dbHost = sDbConf.sourceDbHost
newDbConfig.dbPort = sDbConf.sourceDbPort
newDbConfig.dbName = sDbConf.sourceDbName
newDbConfig.dbUserName = sDbConf.sourceDbUserName
newDbConfig.dbPassword = sDbConf.sourceDbPassword
BackupDatabase(&newDbConfig, backupFileName, true)
//Restore source database into target database
utils.Info("Restoring [%s] database into [%s] database...", sDbConf.sourceDbName, dbConf.dbName)
RestoreDatabase(dbConf, backupFileName)
utils.Info("[%s] database has been restored into [%s] database", sDbConf.sourceDbName, dbConf.dbName)
utils.Info("Database migration completed!")
}
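pkg/migrate.go composes the existing primitives: it backs up the source database with compression disabled (a plain .sql dump) and then restores that dump into the target via RestoreDatabase. A migrate subcommand is presumably registered in the cmd package; a hypothetical wiring could look like the following, though the actual command definition and flag set live elsewhere in the repository and may differ:

```go
// Hypothetical wiring (not shown in this diff): a cobra subcommand that hands
// off to pkg.StartMigration. The real cmd package may define this differently.
package cmd

import (
	"github.com/jkaninda/mysql-bkup/pkg"
	"github.com/spf13/cobra"
)

var MigrateCmd = &cobra.Command{
	Use:   "migrate",
	Short: "Migrate a database from a source to a target database",
	Run: func(cmd *cobra.Command, args []string) {
		pkg.StartMigration(cmd)
	},
}
```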

View File

@@ -13,8 +13,6 @@ func StartRestore(cmd *cobra.Command) {
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "dbname", "DB_NAME")
utils.GetEnv(cmd, "port", "DB_PORT")
//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
@@ -23,47 +21,45 @@ func StartRestore(cmd *cobra.Command) {
file = utils.GetEnv(cmd, "file", "FILE_NAME")
executionMode, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
dbConf = getDbConfig(cmd)
switch storage {
case "s3":
restoreFromS3(file, bucket, s3Path)
restoreFromS3(dbConf, file, bucket, s3Path)
case "local":
utils.Info("Restore database from local")
copyToTmp(storagePath, file)
RestoreDatabase(file)
RestoreDatabase(dbConf, file)
case "ssh":
restoreFromRemote(file, remotePath)
restoreFromRemote(dbConf, file, remotePath)
case "ftp":
utils.Fatal("Restore from FTP is not yet supported")
default:
utils.Info("Restore database from local")
RestoreDatabase(file)
copyToTmp(storagePath, file)
RestoreDatabase(dbConf, file)
}
}
func restoreFromS3(file, bucket, s3Path string) {
func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
utils.Info("Restore database from s3")
err := utils.DownloadFile(tmpPath, file, bucket, s3Path)
if err != nil {
utils.Fatal("Error download file from s3 %s %v", file, err)
}
RestoreDatabase(file)
RestoreDatabase(db, file)
}
func restoreFromRemote(file, remotePath string) {
func restoreFromRemote(db *dbConfig, file, remotePath string) {
utils.Info("Restore database from remote server")
err := CopyFromRemote(file, remotePath)
if err != nil {
utils.Fatal("Error download file from remote server: %s %v ", filepath.Join(remotePath, file), err)
}
RestoreDatabase(file)
RestoreDatabase(db, file)
}
// RestoreDatabase restores the database
func RestoreDatabase(file string) {
dbHost = os.Getenv("DB_HOST")
dbPassword = os.Getenv("DB_PASSWORD")
dbUserName = os.Getenv("DB_USERNAME")
dbName = os.Getenv("DB_NAME")
dbPort = os.Getenv("DB_PORT")
func RestoreDatabase(db *dbConfig, file string) {
gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
if file == "" {
utils.Fatal("Error, file required")
@@ -93,7 +89,7 @@ func RestoreDatabase(file string) {
}
if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) {
utils.TestDatabaseConnection()
testDatabaseConnection(db)
utils.Info("Restoring database...")
extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))

View File

@@ -4,16 +4,12 @@ const cronLogFile = "/var/log/mysql-bkup.log"
const tmpPath = "/tmp/backup"
const backupCronFile = "/usr/local/bin/backup_cron.sh"
const algorithm = "aes256"
const gpgHome = "gnupg"
const gpgExtension = "gpg"
var (
storage = "local"
file = ""
dbPassword = ""
dbUserName = ""
dbName = ""
dbHost = ""
dbPort = "3306"
executionMode = "default"
storagePath = "/backup"
disableCompression = false
@@ -27,6 +23,16 @@ var dbHVars = []string{
"DB_USERNAME",
"DB_NAME",
}
var sdbRVars = []string{
"SOURCE_DB_HOST",
"SOURCE_DB_PORT",
"SOURCE_DB_NAME",
"SOURCE_DB_USERNAME",
"SOURCE_DB_PASSWORD",
}
var dbConf *dbConfig
var sDbConf *dbSourceConfig
// sshHVars Required environment variables for SSH remote server storage
var sshHVars = []string{