diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml new file mode 100644 index 0000000..17ae6ed --- /dev/null +++ b/.github/workflows/deploy-docs.yml @@ -0,0 +1,55 @@ +name: Deploy Documentation site to GitHub Pages + +on: + push: + branches: ['main'] + paths: + - 'docs/**' + - '.github/workflows/deploy-docs.yml' + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: 'pages' + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: '3.2' + bundler-cache: true + cache-version: 0 + working-directory: docs + - name: Setup Pages + id: pages + uses: actions/configure-pages@v2 + - name: Build with Jekyll + working-directory: docs + run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}" + env: + JEKYLL_ENV: production + - name: Upload artifact + uses: actions/upload-pages-artifact@v1 + with: + path: 'docs/_site/' + + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 \ No newline at end of file diff --git a/Makefile b/Makefile index e720a31..2fa83c6 100644 --- a/Makefile +++ b/Makefile @@ -17,7 +17,7 @@ docker-build: docker build -f docker/Dockerfile -t jkaninda/pg-bkup:latest . 
docker-run: docker-build - docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" jkaninda/pg-bkup bkup backup --prune --keep-last 2 + docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" jkaninda/pg-bkup bkup backup --prune --keep-last 2 docker-run-scheduled: docker-build @@ -27,6 +27,10 @@ docker-run-scheduled: docker-build docker-run-scheduled-s3: docker-build docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" jkaninda/pg-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *" -docker-restore-s3: docker-build - docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "FILE_NAME=${FILE_NAME}" jkaninda/pg-bkup bkup restore --storage s3 --path /custom-path +docker-run-s3: docker-build + docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "REGION=eu2" jkaninda/pg-bkup bkup backup --storage s3 --path /custom-path + + +docker-restore-s3: docker-build + 
docker run --rm --network internal --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "REGION=eu2" -e "FILE_NAME=${FILE_NAME}" jkaninda/pg-bkup bkup restore --storage s3 --path /custom-path diff --git a/docker/Dockerfile b/docker/Dockerfile index 38e82b8..bb1e3a5 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -19,6 +19,13 @@ ENV STORAGE=local ENV BUCKET_NAME="" ENV ACCESS_KEY="" ENV SECRET_KEY="" +ENV REGION="" +ENV SSH_USER="" +ENV SSH_PASSWORD="" +ENV SSH_HOST_NAME="" +ENV SSH_IDENTIFY_FILE="/root/.ssh/id_rsa" +ENV GPG_PASS_PHRASE="" +ENV SSH_PORT="22" ENV S3_ENDPOINT=https://s3.amazonaws.com ARG DEBIAN_FRONTEND=noninteractive ENV VERSION="v0.6" @@ -26,7 +33,7 @@ LABEL authors="Jonas Kaninda" RUN apt-get update -qq -RUN apt install s3fs postgresql-client postgresql-client-common libpq-dev supervisor cron -y +RUN apt install postgresql-client postgresql-client-common supervisor cron openssh-client -y # Clear cache RUN apt-get clean && rm -rf /var/lib/apt/lists/* @@ -45,4 +52,5 @@ ADD docker/supervisord.conf /etc/supervisor/supervisord.conf RUN mkdir /backup -WORKDIR /backup \ No newline at end of file +RUN mkdir /tmp/pg-bkup +WORKDIR /root diff --git a/go.mod b/go.mod index 789cfc7..83d89a2 100644 --- a/go.mod +++ b/go.mod @@ -8,8 +8,9 @@ require ( ) require ( + github.com/aws/aws-sdk-go v1.55.3 // indirect github.com/hpcloud/tail v1.0.0 // indirect - github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect golang.org/x/sys v0.22.0 // indirect gopkg.in/fsnotify.v1 v1.4.7 // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect diff --git a/go.sum b/go.sum index 426c610..2d2cfef 100644 --- a/go.sum +++ b/go.sum @@ -1,18 +1,53 @@ 
+github.com/aws/aws-sdk-go v1.55.3 h1:0B5hOX+mIx7I5XPOrjrHlKSDQV/+ypFZpIHOx5LOk3E= +github.com/aws/aws-sdk-go v1.55.3/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= +github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= +github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 
v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= +github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= +github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= +github.com/minio/minio-go/v7 v7.0.74 h1:fTo/XlPBTSpo3BAMshlwKL5RspXRv9us5UeHEGYCFe0= +github.com/minio/minio-go/v7 v7.0.74/go.mod h1:qydcVzV8Hqtj1VtEocfxbmVFa2siu6HGa+LDEPogjD8= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pkg/backup.go b/pkg/backup.go index f21a78b..ba4676a 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -33,16 +33,35 @@ func StartBackup(cmd *cobra.Command) { prune, _ := cmd.Flags().GetBool("prune") disableCompression, _ = cmd.Flags().GetBool("disable-compression") executionMode, _ = cmd.Flags().GetString("mode") + dbName = os.Getenv("DB_NAME") + storagePath = os.Getenv("STORAGE_PATH") + + //Generate file name + backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbName, time.Now().Format("20060102_150405")) + if disableCompression { + backupFileName = fmt.Sprintf("%s_%s.sql", dbName, time.Now().Format("20060102_150405")) + } if executionMode == "default" { - if storage == "s3" { + switch storage { + case "s3": utils.Info("Backup database to s3 storage") - s3Backup(disableCompression, s3Path, prune, keepLast) - } else { + BackupDatabase(backupFileName, disableCompression, prune, keepLast) + s3Upload(backupFileName, s3Path) + case "local": utils.Info("Backup database to local storage") - BackupDatabase(disableCompression, prune, keepLast) - + BackupDatabase(backupFileName, disableCompression, prune, keepLast) + moveToBackup(backupFileName, storagePath) + case "ssh": + fmt.Println("x is 2") + case "ftp": + fmt.Println("x is 3") + default: + utils.Info("Backup database to local storage") + BackupDatabase(backupFileName, disableCompression, prune, keepLast) 
+ moveToBackup(backupFileName, storagePath) } + } else if executionMode == "scheduled" { scheduledMode() } else { @@ -98,7 +117,7 @@ func scheduledMode() { } // BackupDatabase backup database -func BackupDatabase(disableCompression bool, prune bool, keepLast int) { +func BackupDatabase(backupFileName string, disableCompression bool, prune bool, keepLast int) { dbHost = os.Getenv("DB_HOST") dbPassword = os.Getenv("DB_PASSWORD") dbUserName = os.Getenv("DB_USERNAME") @@ -117,12 +136,9 @@ func BackupDatabase(disableCompression bool, prune bool, keepLast int) { utils.TestDatabaseConnection() // Backup Database database utils.Info("Backing up database...") - //Generate file name - bkFileName := fmt.Sprintf("%s_%s.sql.gz", dbName, time.Now().Format("20060102_150405")) // Verify is compression is disabled if disableCompression { - bkFileName = fmt.Sprintf("%s_%s.sql", dbName, time.Now().Format("20060102_150405")) // Execute pg_dump cmd := exec.Command("pg_dump", "-h", dbHost, @@ -135,7 +151,7 @@ func BackupDatabase(disableCompression bool, prune bool, keepLast int) { log.Fatal(err) } // save output - file, err := os.Create(fmt.Sprintf("%s/%s", storagePath, bkFileName)) + file, err := os.Create(fmt.Sprintf("%s/%s", tmpPath, backupFileName)) if err != nil { log.Fatal(err) } @@ -145,7 +161,6 @@ func BackupDatabase(disableCompression bool, prune bool, keepLast int) { if err != nil { log.Fatal(err) } - utils.Done("Database has been backed up") } else { // Execute pg_dump @@ -162,7 +177,7 @@ func BackupDatabase(disableCompression bool, prune bool, keepLast int) { gzipCmd := exec.Command("gzip") gzipCmd.Stdin = stdout // save output - gzipCmd.Stdout, err = os.Create(fmt.Sprintf("%s/%s", storagePath, bkFileName)) + gzipCmd.Stdout, err = os.Create(fmt.Sprintf("%s/%s", tmpPath, backupFileName)) gzipCmd.Start() if err != nil { log.Fatal(err) @@ -173,30 +188,62 @@ func BackupDatabase(disableCompression bool, prune bool, keepLast int) { if err := gzipCmd.Wait(); err != nil { 
log.Fatal(err) } - utils.Done("Database has been backed up") } + utils.Done("Database has been backed up") + //Delete old backup - if prune { - deleteOldBackup(keepLast) - } + //if prune { + // deleteOldBackup(keepLast) + //} - historyFile, err := os.OpenFile(fmt.Sprintf("%s/history.txt", storagePath), os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + historyFile, err := os.OpenFile(fmt.Sprintf("%s/history.txt", tmpPath), os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) if err != nil { log.Fatal(err) } defer historyFile.Close() - if _, err := historyFile.WriteString(bkFileName + "\n"); err != nil { + if _, err := historyFile.WriteString(backupFileName + "\n"); err != nil { log.Fatal(err) } } } +func moveToBackup(backupFileName string, destinationPath string) { + //Copy backup from tmp folder to storage destination + err := utils.CopyFile(filepath.Join(tmpPath, backupFileName), filepath.Join(destinationPath, backupFileName)) + if err != nil { + utils.Fatal("Error copying file ", backupFileName, err) -func s3Backup(disableCompression bool, s3Path string, prune bool, keepLast int) { + } + //Delete backup file from tmp folder + err = utils.DeleteFile(filepath.Join(tmpPath, backupFileName)) + if err != nil { + fmt.Println("Error deleting file:", err) + + } + utils.Done("Database has been backed up and copied to destination ") +} +func s3Upload(backupFileName string, s3Path string) { + bucket := os.Getenv("BUCKET_NAME") + utils.Info("Uploading file to S3 storage") + err := utils.UploadFileToS3(tmpPath, backupFileName, bucket, s3Path) + if err != nil { + utils.Fatalf("Error uploading file to S3: %s ", err) + + } + + //Delete backup file from tmp folder + err = utils.DeleteFile(filepath.Join(tmpPath, backupFileName)) + if err != nil { + fmt.Println("Error deleting file:", err) + + } + utils.Done("Database has been backed up and uploaded to s3 ") +} +func s3Backup(backupFileName string, disableCompression bool, s3Path string, prune bool, keepLast int) { // Backup Database to S3 
storage - MountS3Storage(s3Path) - BackupDatabase(disableCompression, prune, keepLast) + //MountS3Storage(s3Path) + //BackupDatabase(backupFileName, disableCompression, prune, keepLast) } func deleteOldBackup(keepLast int) { utils.Info("Deleting old backups...") diff --git a/pkg/encrypt_archive.go b/pkg/encrypt_archive.go new file mode 100644 index 0000000..c1caffe --- /dev/null +++ b/pkg/encrypt_archive.go @@ -0,0 +1 @@ +package pkg diff --git a/pkg/restore.go b/pkg/restore.go index 05541f2..619c40a 100644 --- a/pkg/restore.go +++ b/pkg/restore.go @@ -21,13 +21,34 @@ func StartRestore(cmd *cobra.Command) { storage = utils.GetEnv(cmd, "storage", "STORAGE") file = utils.GetEnv(cmd, "file", "FILE_NAME") executionMode, _ = cmd.Flags().GetString("mode") + bucket := os.Getenv("BUCKET_NAME") - if storage == "s3" { + switch storage { + case "s3": utils.Info("Restore database from s3") - s3Restore(file, s3Path) - } else { + err := utils.DownloadFile(tmpPath, file, bucket, s3Path) + if err != nil { + utils.Fatal("Error download file from s3 ", file, err) + } + RestoreDatabase(file) + case "local": + utils.Info("Restore database from local") + copyTmp(storagePath, file) + RestoreDatabase(file) + case "ssh": + fmt.Println("x is 2") + case "ftp": + fmt.Println("x is 3") + default: utils.Info("Restore database from local") RestoreDatabase(file) + } +} +func copyTmp(sourcePath string, backupFileName string) { + //Copy backup from tmp folder to storage destination + err := utils.CopyFile(filepath.Join(sourcePath, backupFileName), filepath.Join(tmpPath, backupFileName)) + if err != nil { + utils.Fatal("Error copying file ", backupFileName, err) } } @@ -39,7 +60,7 @@ func RestoreDatabase(file string) { dbUserName = os.Getenv("DB_USERNAME") dbName = os.Getenv("DB_NAME") dbPort = os.Getenv("DB_PORT") - storagePath = os.Getenv("STORAGE_PATH") + //storagePath = os.Getenv("STORAGE_PATH") if file == "" { utils.Fatal("Error, file required") } @@ -48,7 +69,7 @@ func RestoreDatabase(file 
string) { utils.Fatal("Please make sure all required environment variables are set") } else { - if utils.FileExists(fmt.Sprintf("%s/%s", storagePath, file)) { + if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) { err := os.Setenv("PGPASSWORD", dbPassword) if err != nil { @@ -56,10 +77,10 @@ func RestoreDatabase(file string) { } utils.TestDatabaseConnection() - extension := filepath.Ext(fmt.Sprintf("%s/%s", storagePath, file)) + extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file)) // Restore from compressed file / .sql.gz if extension == ".gz" { - str := "zcat " + fmt.Sprintf("%s/%s", storagePath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME") + str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME") _, err := exec.Command("bash", "-c", str).Output() if err != nil { utils.Fatal("Error, in restoring the database") @@ -68,7 +89,7 @@ func RestoreDatabase(file string) { } else if extension == ".sql" { //Restore from sql file - str := "cat " + fmt.Sprintf("%s/%s", storagePath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME") + str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME") _, err := exec.Command("bash", "-c", str).Output() if err != nil { utils.Fatal("Error in restoring the database", err) @@ -79,12 +100,13 @@ func RestoreDatabase(file string) { } } else { - utils.Fatal("File not found in ", fmt.Sprintf("%s/%s", storagePath, file)) + utils.Fatal("File not found in ", fmt.Sprintf("%s/%s", tmpPath, file)) } } } -func s3Restore(file, s3Path string) { - // Restore database from S3 
- MountS3Storage(s3Path) - RestoreDatabase(file) -} + +//func s3Restore(file, s3Path string) { +// // Restore database from S3 +// MountS3Storage(s3Path) +// RestoreDatabase(file) +//} diff --git a/pkg/var.go b/pkg/var.go index ca9af1e..d849cab 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -3,6 +3,7 @@ package pkg const s3MountPath string = "/s3mnt" const s3fsPasswdFile string = "/etc/passwd-s3fs" const cronLogFile = "/var/log/pg-bkup.log" +const tmpPath = "/tmp/pg-bkup" const backupCronFile = "/usr/local/bin/backup_cron.sh" var ( diff --git a/utils/s3.go b/utils/s3.go new file mode 100644 index 0000000..1566dca --- /dev/null +++ b/utils/s3.go @@ -0,0 +1,107 @@ +package utils + +import ( + "bytes" + "fmt" + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/credentials" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/service/s3/s3manager" + "net/http" + "os" + "path/filepath" +) + +// CreateSession creates a new AWS session +func CreateSession() (*session.Session, error) { + + //key := aws.String("testobject") + endPoint := os.Getenv("S3_ENDPOINT") + //bucket := os.Getenv("BUCKET_NAME") + region := os.Getenv("REGION") + accessKey := os.Getenv("ACCESS_KEY") + secretKey := os.Getenv("SECRET_KEY") + + // Configure to use MinIO Server + s3Config := &aws.Config{ + Credentials: credentials.NewStaticCredentials(accessKey, secretKey, ""), + Endpoint: aws.String(endPoint), + Region: aws.String(region), + DisableSSL: aws.Bool(false), + S3ForcePathStyle: aws.Bool(true), + } + return session.NewSession(s3Config) + +} + +// UploadFileToS3 uploads a file to S3 with a given prefix +func UploadFileToS3(filePath, key, bucket, prefix string) error { + sess, err := CreateSession() + if err != nil { + return err + } + + svc := s3.New(sess) + + file, err := os.Open(filepath.Join(filePath, key)) + if err != nil { + return err + } + defer file.Close() + + fileInfo, err := file.Stat() + if err != nil { + return 
err + } + + objectKey := fmt.Sprintf("%s/%s", prefix, key) + + buffer := make([]byte, fileInfo.Size()) + file.Read(buffer) + fileBytes := bytes.NewReader(buffer) + fileType := http.DetectContentType(buffer) + + _, err = svc.PutObject(&s3.PutObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(objectKey), + Body: fileBytes, + ContentLength: aws.Int64(fileInfo.Size()), + ContentType: aws.String(fileType), + }) + if err != nil { + return err + } + + return nil +} +func DownloadFile(destinationPath, key, bucket, prefix string) error { + + sess, err := CreateSession() + if err != nil { + return err + } + + file, err := os.Create(filepath.Join(destinationPath, key)) + if err != nil { + fmt.Println("Failed to create file", err) + return err + } + defer file.Close() + + objectKey := fmt.Sprintf("%s/%s", prefix, key) + + downloader := s3manager.NewDownloader(sess) + numBytes, err := downloader.Download(file, + &s3.GetObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(objectKey), + }) + if err != nil { + fmt.Println("Failed to download file", err) + return err + } + fmt.Println("Bytes size", numBytes) + Info("Backup downloaded to ", file.Name()) + return nil +} diff --git a/utils/utils.go b/utils/utils.go index 194233b..c2aa642 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -9,6 +9,7 @@ package utils import ( "fmt" "github.com/spf13/cobra" + "io" "io/fs" "os" ) @@ -46,6 +47,42 @@ func WriteToFile(filePath, content string) error { _, err = file.WriteString(content) return err } +func DeleteFile(filePath string) error { + err := os.Remove(filePath) + if err != nil { + return fmt.Errorf("failed to delete file: %v", err) + } + return nil +} +func CopyFile(src, dst string) error { + // Open the source file for reading + sourceFile, err := os.Open(src) + if err != nil { + return fmt.Errorf("failed to open source file: %v", err) + } + defer sourceFile.Close() + + // Create the destination file + destinationFile, err := os.Create(dst) + if err != nil { + 
return fmt.Errorf("failed to create destination file: %v", err) + } + defer destinationFile.Close() + + // Copy the content from source to destination + _, err = io.Copy(destinationFile, sourceFile) + if err != nil { + return fmt.Errorf("failed to copy file: %v", err) + } + + // Flush the buffer to ensure all data is written + err = destinationFile.Sync() + if err != nil { + return fmt.Errorf("failed to sync destination file: %v", err) + } + + return nil +} func ChangePermission(filePath string, mod int) { if err := os.Chmod(filePath, fs.FileMode(mod)); err != nil { Fatalf("Error changing permissions of %s: %v\n", filePath, err)