Merge pull request #100 from jkaninda/develop

Merge develop
This commit is contained in:
Jonas Kaninda
2024-09-30 00:55:42 +02:00
committed by GitHub
21 changed files with 260 additions and 68 deletions

View File

@@ -167,7 +167,7 @@ While it may work against different implementations, there are no guarantees abo
We decided to publish this image as a simpler and more lightweight alternative because of the following requirements:
- The original image is based on `ubuntu` and requires additional tools, making it heavy.
- The original image is based on `Alpine` and requires additional tools, making it heavy.
- This image is written in Go.
- `arm64` and `arm/v7` architectures are supported.
- Docker in Swarm mode is supported.

View File

@@ -9,8 +9,8 @@ RUN go mod download
# Build
RUN CGO_ENABLED=0 GOOS=linux go build -o /app/mysql-bkup
FROM ubuntu:24.04
ENV DB_HOST="localhost"
FROM alpine:3.20.3
ENV DB_HOST=""
ENV DB_NAME=""
ENV DB_USERNAME=""
ENV DB_PASSWORD=""
@@ -20,49 +20,42 @@ ENV AWS_S3_ENDPOINT=""
ENV AWS_S3_BUCKET_NAME=""
ENV AWS_ACCESS_KEY=""
ENV AWS_SECRET_KEY=""
ENV AWS_REGION="us-west-2"
ENV AWS_S3_PATH=""
ENV AWS_REGION="us-west-2"
ENV AWS_DISABLE_SSL="false"
ENV GPG_PASSPHRASE=""
ENV SSH_USER=""
ENV SSH_REMOTE_PATH=""
ENV SSH_PASSWORD=""
ENV SSH_HOST_NAME=""
ENV SSH_IDENTIFY_FILE=""
ENV SSH_PORT="22"
ENV SSH_PORT=22
ENV REMOTE_PATH=""
ENV FTP_HOST_NAME=""
ENV FTP_PORT=21
ENV FTP_USER=""
ENV FTP_PASSWORD=""
ENV TARGET_DB_HOST=""
ENV TARGET_DB_PORT=3306
ENV TARGET_DB_NAME="localhost"
ENV TARGET_DB_NAME=""
ENV TARGET_DB_USERNAME=""
ENV TARGET_DB_PASSWORD=""
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v1.2.8"
ENV VERSION="v1.2.9"
ENV BACKUP_CRON_EXPRESSION=""
ENV TG_TOKEN=""
ENV TG_CHAT_ID=""
ARG WORKDIR="/config"
ARG BACKUPDIR="/backup"
ARG BACKUP_TMP_DIR="/tmp/backup"
ARG BACKUP_CRON="/etc/cron.d/backup_cron"
ARG BACKUP_CRON_SCRIPT="/usr/local/bin/backup_cron.sh"
LABEL author="Jonas Kaninda"
RUN apt-get update -qq
RUN apt install mysql-client cron gnupg -y
# Clear cache
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
RUN apk --update add mysql-client gnupg
RUN mkdir $WORKDIR
RUN mkdir $BACKUPDIR
RUN mkdir -p $BACKUP_TMP_DIR
RUN chmod 777 $WORKDIR
RUN chmod 777 $BACKUPDIR
RUN chmod 777 $BACKUP_TMP_DIR
RUN touch $BACKUP_CRON && \
touch $BACKUP_CRON_SCRIPT && \
chmod 777 $BACKUP_CRON && \
chmod 777 $BACKUP_CRON_SCRIPT
RUN chmod 777 $WORKDIR
COPY --from=build /app/mysql-bkup /usr/local/bin/mysql-bkup
RUN chmod +x /usr/local/bin/mysql-bkup

View File

@@ -1,13 +0,0 @@
[supervisord]
nodaemon=true
user=root
logfile=/var/log/supervisor/supervisord.log
pidfile=/var/run/supervisord.pid
[program:cron]
command = /bin/bash -c "declare -p | grep -Ev '^declare -[[:alpha:]]*r' > /run/supervisord.env && /usr/sbin/cron -f -L 15"
autostart=true
autorestart=true
user = root
stderr_logfile=/var/log/cron.err.log
stdout_logfile=/var/log/cron.out.log

View File

@@ -0,0 +1,44 @@
---
title: Backup to FTP remote server
layout: default
parent: How Tos
nav_order: 4
---
# Backup to FTP remote server
As described in the SSH backup section, to change the storage of your backup and use an FTP remote server as storage, you need to add `--storage ftp`.
You need to add the full remote path by adding the `--path /home/jkaninda/backups` flag or using the `REMOTE_PATH` environment variable.
{: .note }
These environment variables are required for FTP backup: `FTP_HOST_NAME`, `FTP_USER`, `FTP_PASSWORD`, `FTP_PORT`, and `REMOTE_PATH`.
```yml
services:
mysql-bkup:
# In production, it is advised to lock your image tag to a proper
# release version instead of using `latest`.
# Check https://github.com/jkaninda/mysql-bkup/releases
# for a list of available releases.
image: jkaninda/mysql-bkup
container_name: mysql-bkup
command: backup --storage ftp -d database
environment:
- DB_PORT=3306
- DB_HOST=mysql
- DB_NAME=database
- DB_USERNAME=username
- DB_PASSWORD=password
## FTP config
- FTP_HOST_NAME="hostname"
- FTP_PORT=21
- FTP_USER=user
- FTP_PASSWORD=password
- REMOTE_PATH=/home/jkaninda/backups
# mysql-bkup container must be connected to the same network as your database
networks:
- web
networks:
web:
```
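
If the same pattern as the SSH how-to applies, a scheduled FTP backup only requires adding a cron expression. A minimal sketch, assuming `BACKUP_CRON_EXPRESSION` alone enables scheduled mode (as suggested by the SSH example); the `--path` flag is used here instead of `REMOTE_PATH`, and all values are illustrative:
```yml
services:
  mysql-bkup:
    image: jkaninda/mysql-bkup
    container_name: mysql-bkup
    # The remote path can be passed with the --path flag instead of REMOTE_PATH
    command: backup --storage ftp --path /home/jkaninda/backups -d database
    environment:
      - DB_PORT=3306
      - DB_HOST=mysql
      - DB_NAME=database
      - DB_USERNAME=username
      - DB_PASSWORD=password
      ## FTP config
      - FTP_HOST_NAME=hostname
      - FTP_PORT=21
      - FTP_USER=user
      - FTP_PASSWORD=password
      ## Assumed: run the backup every day at 1 a.m., as in the SSH how-to
      - BACKUP_CRON_EXPRESSION=0 1 * * *
    networks:
      - web
networks:
  web:
```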

View File

@@ -8,7 +8,7 @@ nav_order: 3
As described in the S3 backup section, to change the storage of your backup and use an SSH remote server as storage, you need to add `--storage ssh` or `--storage remote`.
You need to add the full remote path by adding the `--path /home/jkaninda/backups` flag or using the `SSH_REMOTE_PATH` environment variable.
You need to add the full remote path by adding the `--path /home/jkaninda/backups` flag or using the `REMOTE_PATH` environment variable.
{: .note }
These environment variables are required for SSH backup: `SSH_HOST_NAME`, `SSH_USER`, `SSH_REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT`, or `SSH_PASSWORD` if you don't use a private key to access your server.
@@ -36,7 +36,7 @@ services:
- SSH_HOST_NAME="hostname"
- SSH_PORT=22
- SSH_USER=user
- SSH_REMOTE_PATH=/home/jkaninda/backups
- REMOTE_PATH=/home/jkaninda/backups
- SSH_IDENTIFY_FILE=/tmp/id_ed25519
## We advise you to use a private key instead of a password
#- SSH_PASSWORD=password
@@ -76,7 +76,7 @@ services:
- SSH_HOST_NAME="hostname"
- SSH_PORT=22
- SSH_USER=user
- SSH_REMOTE_PATH=/home/jkaninda/backups
- REMOTE_PATH=/home/jkaninda/backups
- SSH_IDENTIFY_FILE=/tmp/id_ed25519
# - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional
## We advise you to use a private key instead of a password
@@ -131,7 +131,7 @@ spec:
value: "22"
- name: SSH_USER
value: "xxx"
- name: SSH_REMOTE_PATH
- name: REMOTE_PATH
value: "/home/jkaninda/backups"
- name: AWS_ACCESS_KEY
value: "xxxx"

View File

@@ -2,7 +2,7 @@
title: Deploy on Kubernetes
layout: default
parent: How Tos
nav_order: 8
nav_order: 9
---
## Deploy on Kubernetes

View File

@@ -2,7 +2,7 @@
title: Encrypt backups using GPG
layout: default
parent: How Tos
nav_order: 7
nav_order: 8
---
# Encrypt backup

View File

@@ -2,7 +2,7 @@
title: Migrate database
layout: default
parent: How Tos
nav_order: 9
nav_order: 10
---
# Migrate database

View File

@@ -2,7 +2,7 @@
title: Restore database from AWS S3
layout: default
parent: How Tos
nav_order: 5
nav_order: 6
---
# Restore database from S3 storage

View File

@@ -2,7 +2,7 @@
title: Restore database from SSH
layout: default
parent: How Tos
nav_order: 6
nav_order: 7
---
# Restore database from SSH remote server

View File

@@ -2,7 +2,7 @@
title: Restore database
layout: default
parent: How Tos
nav_order: 4
nav_order: 5
---
# Restore database

View File

@@ -156,7 +156,7 @@ While it may work against different implementations, there are no guarantees abo
We decided to publish this image as a simpler and more lightweight alternative because of the following requirements:
- The original image is based on `ubuntu` and requires additional tools, making it heavy.
- The original image is based on `Alpine` and requires additional tools, making it heavy.
- This image is written in Go.
- `arm64` and `arm/v7` architectures are supported.
- Docker in Swarm mode is supported.

View File

@@ -55,7 +55,11 @@ Backup, restore and migrate targets, schedule and retention are configured using
| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
| SSH_PORT | Optional, required for SSH storage | ssh remote server port |
| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) |
| REMOTE_PATH | Optional, required for SSH or FTP storage | remote path (/home/toto/backup) |
| FTP_HOST_NAME | Optional, required for FTP storage | FTP host name |
| FTP_PORT | Optional, required for FTP storage | FTP server port number |
| FTP_USER | Optional, required for FTP storage | FTP user |
| FTP_PASSWORD | Optional, required for FTP storage | FTP user password |
| TARGET_DB_HOST | Optional, required for database migration | Target database host |
| TARGET_DB_PORT | Optional, required for database migration | Target database port |
| TARGET_DB_NAME | Optional, required for database migration | Target database name |
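
In practice, this table change means a single `REMOTE_PATH` value now covers both SSH and FTP storage; judging by the `GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")` calls further down in the diff, the old `SSH_REMOTE_PATH` name appears to remain as a fallback. A hypothetical environment fragment (not part of the PR) illustrating the shared variable:
```yml
environment:
  # One remote path for either storage backend
  - REMOTE_PATH=/home/toto/backup
  ## SSH storage
  #- SSH_HOST_NAME=hostname
  #- SSH_USER=user
  #- SSH_PORT=22
  ## FTP storage
  #- FTP_HOST_NAME=hostname
  #- FTP_USER=user
  #- FTP_PASSWORD=password
  #- FTP_PORT=21
```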

3
go.mod
View File

@@ -13,7 +13,10 @@ require (
)
require (
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jlaffaye/ftp v0.2.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
golang.org/x/sys v0.22.0 // indirect

7
go.sum
View File

@@ -7,10 +7,17 @@ github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9Hu
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg=
github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=

View File

@@ -80,7 +80,7 @@ func BackupTask(db *dbConfig, config *BackupConfig) {
case "ssh", "remote":
sshBackup(db, config)
case "ftp":
utils.Fatal("Not supported storage type: %s", config.storage)
ftpBackup(db, config)
default:
localBackup(db, config)
}
@@ -190,7 +190,7 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
}
utils.Info("Uploading backup archive to remote storage S3 ... ")
utils.Info("Backup name is %s", finalFileName)
err := utils.UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
err := UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
if err != nil {
utils.Fatal("Error uploading file to S3: %s ", err)
@@ -204,7 +204,7 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
}
// Delete old backup
if config.prune {
err := utils.DeleteOldBackup(bucket, s3Path, config.backupRetention)
err := DeleteOldBackup(bucket, s3Path, config.backupRetention)
if err != nil {
utils.Fatal("Error deleting old backup from S3: %s ", err)
}
@@ -252,6 +252,41 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
//Delete temp
deleteTemp()
}
func ftpBackup(db *dbConfig, config *BackupConfig) {
utils.Info("Backup database to the remote FTP server")
//Backup database
BackupDatabase(db, config.backupFileName, disableCompression)
finalFileName := config.backupFileName
if config.encryption {
encryptBackup(config.backupFileName, config.passphrase)
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
}
utils.Info("Uploading backup archive to the remote FTP server ... ")
utils.Info("Backup name is %s", finalFileName)
err := CopyToFTP(finalFileName, config.remotePath)
if err != nil {
utils.Fatal("Error uploading file to the remote FTP server: %s ", err)
}
//Delete backup file from tmp folder
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
if err != nil {
utils.Error("Error deleting file: %v", err)
}
if config.prune {
//TODO: Delete old backup from remote server
utils.Info("Deleting old backup from a remote server is not implemented yet")
}
utils.Done("Uploading backup archive to the remote FTP server ... done ")
//Send notification
utils.NotifySuccess(finalFileName)
//Delete temp
deleteTemp()
}
// encryptBackup encrypt backup
func encryptBackup(backupFileName, passphrase string) {

View File

@@ -49,6 +49,13 @@ type RestoreConfig struct {
bucket string
gpqPassphrase string
}
type FTPConfig struct {
host string
user string
password string
port string
remotePath string
}
func initDbConfig(cmd *cobra.Command) *dbConfig {
//Set env
@@ -71,9 +78,10 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
utils.GetEnv(cmd, "path", "REMOTE_PATH")
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
@@ -100,10 +108,11 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
}
func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "path", "REMOTE_PATH")
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
_, _ = cmd.Flags().GetString("mode")
@@ -135,3 +144,18 @@ func initTargetDbConfig() *targetDbConfig {
}
return &tdbConfig
}
func initFtpConfig() *FTPConfig {
//Initialize backup configs
fConfig := FTPConfig{}
fConfig.host = os.Getenv("FTP_HOST_NAME")
fConfig.user = os.Getenv("FTP_USER")
fConfig.password = os.Getenv("FTP_PASSWORD")
fConfig.port = os.Getenv("FTP_PORT")
fConfig.remotePath = os.Getenv("REMOTE_PATH")
err := utils.CheckEnvVars(ftpVars)
if err != nil {
utils.Error("Please make sure all required environment variables for FTP are set")
utils.Fatal("Error checking environment variables: %s", err)
}
return &fConfig
}

81
pkg/ftp.go Normal file
View File

@@ -0,0 +1,81 @@
package pkg
import (
"fmt"
"github.com/jlaffaye/ftp"
"io"
"os"
"path/filepath"
"time"
)
// initFtpClient initializes and authenticates an FTP client
func initFtpClient() (*ftp.ServerConn, error) {
ftpConfig := initFtpConfig()
ftpClient, err := ftp.Dial(fmt.Sprintf("%s:%s", ftpConfig.host, ftpConfig.port), ftp.DialWithTimeout(5*time.Second))
if err != nil {
return nil, fmt.Errorf("failed to connect to FTP: %w", err)
}
err = ftpClient.Login(ftpConfig.user, ftpConfig.password)
if err != nil {
return nil, fmt.Errorf("failed to log in to FTP: %w", err)
}
return ftpClient, nil
}
// CopyToFTP uploads a file to the remote FTP server
func CopyToFTP(fileName, remotePath string) (err error) {
ftpConfig := initFtpConfig()
ftpClient, err := initFtpClient()
if err != nil {
return err
}
defer ftpClient.Quit()
filePath := filepath.Join(tmpPath, fileName)
file, err := os.Open(filePath)
if err != nil {
return fmt.Errorf("failed to open file %s: %w", fileName, err)
}
defer file.Close()
remoteFilePath := filepath.Join(ftpConfig.remotePath, fileName)
err = ftpClient.Stor(remoteFilePath, file)
if err != nil {
return fmt.Errorf("failed to upload file %s: %w", fileName, err)
}
return nil
}
// CopyFromFTP downloads a file from the remote FTP server
func CopyFromFTP(fileName, remotePath string) (err error) {
ftpClient, err := initFtpClient()
if err != nil {
return err
}
defer ftpClient.Quit()
remoteFilePath := filepath.Join(remotePath, fileName)
r, err := ftpClient.Retr(remoteFilePath)
if err != nil {
return fmt.Errorf("failed to retrieve file %s: %w", fileName, err)
}
defer r.Close()
localFilePath := filepath.Join(tmpPath, fileName)
outFile, err := os.Create(localFilePath)
if err != nil {
return fmt.Errorf("failed to create local file %s: %w", fileName, err)
}
defer outFile.Close()
_, err = io.Copy(outFile, r)
if err != nil {
return fmt.Errorf("failed to copy data to local file %s: %w", fileName, err)
}
return nil
}

View File

@@ -30,7 +30,7 @@ func StartRestore(cmd *cobra.Command) {
case "ssh":
restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath)
case "ftp":
utils.Fatal("Restore from FTP is not yet supported")
restoreFromFTP(dbConf, restoreConf.file, restoreConf.remotePath)
default:
utils.Info("Restore database from local")
copyToTmp(storagePath, restoreConf.file)
@@ -40,7 +40,7 @@ func StartRestore(cmd *cobra.Command) {
func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
utils.Info("Restore database from s3")
err := utils.DownloadFile(tmpPath, file, bucket, s3Path)
err := DownloadFile(tmpPath, file, bucket, s3Path)
if err != nil {
utils.Fatal("Error download file from s3 %s %v", file, err)
}
@@ -54,6 +54,14 @@ func restoreFromRemote(db *dbConfig, file, remotePath string) {
}
RestoreDatabase(db, file)
}
func restoreFromFTP(db *dbConfig, file, remotePath string) {
utils.Info("Restore database from FTP server")
err := CopyFromFTP(file, remotePath)
if err != nil {
utils.Fatal("Error download file from FTP server: %s %v", filepath.Join(remotePath, file), err)
}
RestoreDatabase(db, file)
}
// RestoreDatabase restore database
func RestoreDatabase(db *dbConfig, file string) {
@@ -97,7 +105,7 @@ func RestoreDatabase(db *dbConfig, file string) {
// Restore from compressed file / .sql.gz
if extension == ".gz" {
str := "zcat " + filepath.Join(tmpPath, file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
_, err := exec.Command("bash", "-c", str).Output()
_, err := exec.Command("sh", "-c", str).Output()
if err != nil {
utils.Fatal("Error, in restoring the database %v", err)
}
@@ -109,7 +117,7 @@ func RestoreDatabase(db *dbConfig, file string) {
} else if extension == ".sql" {
//Restore from sql file
str := "cat " + filepath.Join(tmpPath, file) + " | mysql -h " + db.dbHost + " -P " + db.dbPort + " -u " + db.dbUserName + " " + db.dbName
_, err := exec.Command("bash", "-c", str).Output()
_, err := exec.Command("sh", "-c", str).Output()
if err != nil {
utils.Fatal("Error in restoring the database %v", err)
}

View File

@@ -4,7 +4,7 @@
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
package utils
package pkg
import (
"bytes"
@@ -14,6 +14,7 @@ import (
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/jkaninda/mysql-bkup/utils"
"log"
"net/http"
"os"
@@ -35,20 +36,20 @@ func CreateSession() (*session.Session, error) {
"AWS_REGION",
}
endPoint := GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
accessKey := GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
secretKey := GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
_ = GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
endPoint := utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
accessKey := utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
secretKey := utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
_ = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
region := os.Getenv("AWS_REGION")
awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
if err != nil {
Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
}
err = CheckEnvVars(awsVars)
err = utils.CheckEnvVars(awsVars)
if err != nil {
Fatal("Error checking environment variables\n: %s", err)
utils.Fatal("Error checking environment variables\n: %s", err)
}
// S3 Config
s3Config := &aws.Config{
@@ -108,7 +109,7 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error {
if err != nil {
return err
}
Info("Download backup from S3 storage...")
utils.Info("Download backup from S3 storage...")
file, err := os.Create(filepath.Join(destinationPath, key))
if err != nil {
fmt.Println("Failed to create file", err)
@@ -125,10 +126,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error {
Key: aws.String(objectKey),
})
if err != nil {
fmt.Println("Failed to download file", err)
utils.Error("Failed to download file %s", key)
return err
}
Info("Backup downloaded: %s bytes size %s ", file.Name(), numBytes)
utils.Info("Backup downloaded: %s bytes size %s ", file.Name(), numBytes)
return nil
}

View File

@@ -8,7 +8,6 @@ package pkg
const cronLogFile = "/var/log/mysql-bkup.log"
const tmpPath = "/tmp/backup"
const backupCronFile = "/usr/local/bin/backup_cron.sh"
const algorithm = "aes256"
const gpgHome = "/config/gnupg"
const gpgExtension = "gpg"
@@ -42,7 +41,13 @@ var targetDbConf *targetDbConfig
// sshHVars Required environment variables for SSH remote server storage
var sshHVars = []string{
"SSH_USER",
"SSH_REMOTE_PATH",
"REMOTE_PATH",
"SSH_HOST_NAME",
"SSH_PORT",
}
var ftpVars = []string{
"FTP_HOST_NAME",
"FTP_USER",
"FTP_PASSWORD",
"FTP_PORT",
}