Compare commits

..

52 Commits
v0.3 ... v0.4

Author SHA1 Message Date
a086921242 Merge pull request #40 from jkaninda/develop
refactor: refactoring of code
2024-01-19 06:57:00 +01:00
3525a90b93 refactor: refactoring of code 2024-01-19 06:56:19 +01:00
40f2a2c99d Merge pull request #39 from jkaninda/develop
refactor: clean up code
2024-01-19 06:17:41 +01:00
3537532d5f refactor: clean up code 2024-01-19 06:16:51 +01:00
b98191f586 Merge pull request #38 from jkaninda/develop
docs: update readme.md
2024-01-19 05:45:42 +01:00
502767bb12 docs: update readme.md 2024-01-19 05:45:07 +01:00
b3b4248fa0 Merge pull request #37 from jkaninda/develop
Develop
2024-01-19 05:37:46 +01:00
8de463ad38 docs: update examples 2024-01-19 05:37:16 +01:00
2f0375dee3 docs: update examples 2024-01-19 05:36:40 +01:00
313e4c9525 Merge pull request #36 from jkaninda/develop
refactor: move backup, restore, s3fs tasks in pkg folder
2024-01-19 05:31:57 +01:00
164d8eda77 refactor: move backup, restore, s3fs tasks in pkg folder 2024-01-19 05:31:30 +01:00
80923885c1 Merge pull request #35 from jkaninda/develop
Develop
2024-01-18 19:15:03 +01:00
078d57fc0d Refactoring of code 2024-01-18 19:14:19 +01:00
abd04c0a37 Refactoring of code 2024-01-18 19:13:04 +01:00
8b7d1576a4 Merge pull request #34 from jkaninda/develop
Develop
2024-01-18 15:08:00 +01:00
452d77f5ee chore: update README 2024-01-18 15:07:01 +01:00
4630df0dd6 chore: update README 2024-01-18 15:05:44 +01:00
5d97ec0a4d chore: update README 2024-01-18 15:05:25 +01:00
d9a86ca053 Merge pull request #33 from jkaninda/develop
chore: clean project
2024-01-18 14:47:19 +01:00
380ea59e95 chore: clean project 2024-01-18 14:43:10 +01:00
af037f195c Merge pull request #32 from jkaninda/jkaninda-patch-1
Delete .DS_Store
2024-01-18 14:41:55 +01:00
45d397dfc8 Delete .DS_Store 2024-01-18 14:41:35 +01:00
72db75b9fb Merge pull request #31 from jkaninda/go_migration
Fix: Docker build file outpout path
2024-01-18 14:29:28 +01:00
4b1501c095 Fix: Docker build file outpout path 2024-01-18 14:28:41 +01:00
2823848fd1 Merge pull request #30 from jkaninda/go_migration
Migrate project to Go
2024-01-18 14:20:29 +01:00
d587c18e75 Migrate project to Go 2024-01-18 14:19:27 +01:00
508ca68366 Merge pull request #29 from jkaninda/develop
docs: add recommendation for backup user
2024-01-14 12:35:00 +01:00
a113b40126 docs: add recommendation for backup user 2024-01-14 12:33:44 +01:00
1c2a9fccc6 Merge pull request #28 from jkaninda/develop
Develop
2024-01-11 19:40:20 +01:00
c3f17b3d85 feat: add database backup verification 2024-01-11 19:39:50 +01:00
2a8ad3a6e2 feat: add database backup verification 2024-01-11 19:38:13 +01:00
cc15452ccf Merge pull request #27 from jkaninda/develop
Refactoring
2023-12-27 21:49:12 +01:00
bcfc69e7f9 Refactoring 2023-12-27 21:48:24 +01:00
f3f859ae05 Merge pull request #26 from jkaninda/develop
Add database connection testing before running in scheduled mode
2023-12-27 21:29:02 +01:00
947b9fa888 Add database connection testing before running in scheduled mode 2023-12-27 21:27:24 +01:00
142a2a2dc3 Merge pull request #25 from jkaninda/develop
Update docker compose example files
2023-12-27 07:12:35 +01:00
548ba17bbb Update docker compose example files 2023-12-27 07:10:57 +01:00
b245fe1a11 Merge pull request #24 from jkaninda/develop
Update docker compose example files
2023-12-27 06:59:37 +01:00
0355be59e1 Update docker compose example files 2023-12-27 06:56:06 +01:00
f5bed16951 Merge pull request #23 from jkaninda/develop
Remove volume
2023-12-26 21:54:41 +01:00
25a32823b7 Remove volume 2023-12-26 21:54:04 +01:00
498f8ee545 Merge pull request #22 from jkaninda/jkaninda-patch-1
Delete .DS_Store
2023-12-26 21:43:02 +01:00
ae01fb0edb Delete .DS_Store 2023-12-26 21:42:27 +01:00
1f797e019d Merge pull request #21 from jkaninda/develop
Add deployment example
2023-12-26 21:42:09 +01:00
3ddc00dcbd Add deployment example 2023-12-26 21:41:43 +01:00
3d4a245181 Merge pull request #20 from jkaninda/develop
Add deployment example
2023-12-26 21:39:55 +01:00
df5ec79d85 Add deployment example 2023-12-26 21:39:03 +01:00
53f7a791fa Merge pull request #19 from jkaninda/develop
Update README.md
2023-12-26 21:30:06 +01:00
31d450e218 Update README.md 2023-12-26 21:28:39 +01:00
f21f726464 Merge pull request #18 from jkaninda/develop
Develop
2023-12-25 03:42:01 +01:00
f1e116a38b Update README.md 2023-12-25 03:41:28 +01:00
5dde6feace Update action 2023-12-25 02:32:36 +01:00
25 changed files with 906 additions and 320 deletions

BIN
.DS_Store vendored

Binary file not shown.

View File

@@ -32,8 +32,8 @@ jobs:
uses: docker/build-push-action@v3
with:
push: true
file: "./src/docker/Dockerfile"
file: "./docker/Dockerfile"
platforms: linux/amd64,linux/arm64
tags: |
"${{env.BUILDKIT_IMAGE}}:v0.4"
"${{env.BUILDKIT_IMAGE}}:latest"
"${{env.BUILDKIT_IMAGE}}:v0.3"

5
.gitignore vendored
View File

@@ -4,4 +4,7 @@ data
compose.yaml
.env
test.md
.DS_Store
.DS_Store
mysql-bkup
/.DS_Store
/.idea

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2023 Jonas Kaninda
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,10 +1,17 @@
# MySQL Backup
MySQL Backup tool, backup database to S3 or Object Storage
MySQL Backup and Restoration tool. Backup database to AWS S3 storage or any S3 Alternatives for Object Storage.
[![Build](https://github.com/jkaninda/mysql-bkup/actions/workflows/build.yml/badge.svg)](https://github.com/jkaninda/mysql-bkup/actions/workflows/build.yml)
[![Go Report](https://goreportcard.com/badge/github.com/jkaninda/mysql-bkup)](https://goreportcard.com/report/github.com/jkaninda/mysql-bkup)
![Docker Image Size (latest by date)](https://img.shields.io/docker/image-size/jkaninda/mysql-bkup?style=flat-square)
![Docker Pulls](https://img.shields.io/docker/pulls/jkaninda/mysql-bkup?style=flat-square)
<p align="center">
<a href="https://github.com/jkaninda/mysql-bkup">
<img src="https://www.mysql.com/common/logos/logo-mysql-170x115.png" alt="Logo">
</a>
</p>
> Runs on:
- Docker
- Kubernetes
@@ -23,6 +30,12 @@ MySQL Backup tool, backup database to S3 or Object Storage
- local
- s3
- Object storage
## Volumes:
- /s3mnt => S3 mounting path
- /backup => local storage mounting path
## Usage
| Options | Shorts | Usage |
@@ -35,11 +48,34 @@ MySQL Backup tool, backup database to S3 or Object Storage
| --dbname | -d | Set database name |
| --port | -p | Set database port (default: 3306) |
| --mode | -m | Set execution mode. default or scheduled (default: default) |
| --disable-compression | | Disable database backup compression |
| --period | | Set crontab period for scheduled mode only. (default: "0 1 * * *") |
| --timeout | -t | Set timeout (default: 60s) |
| --help | -h | Print this help message and exit |
| --version | -V | Print version information and exit |
## Note:
Creating a user for backup tasks who has read-only access is recommended!
> create read-only user
```sh
mysql -u root -p
```
```sql
CREATE USER read_only_user IDENTIFIED BY 'your_strong_password';
```
```sql
GRANT SELECT, SHOW VIEW ON *.* TO read_only_user;
```
```sql
FLUSH PRIVILEGES;
```
## Backup database :
Simple backup usage
@@ -149,7 +185,7 @@ docker-compose up -d
## Backup to S3
```sh
docker run --rm --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=db_hostname" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" -e "ACCESS_KEY=your_access_key" -e "SECRET_KEY=your_secret_key" -e "BUCKETNAME=your_bucket_name" -e "S3_ENDPOINT=https://eu2.contabostorage.com" jkaninda/mysql-bkup bkup -o backup -s s3 -d database_name
docker run --rm --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=db_hostname" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" -e "ACCESS_KEY=your_access_key" -e "SECRET_KEY=your_secret_key" -e "BUCKETNAME=your_bucket_name" -e "S3_ENDPOINT=https://s3.us-west-2.amazonaws.com" jkaninda/mysql-bkup bkup -o backup -s s3 -d database_name
```
> To change s3 backup path add this flag : --path /myPath . default path is /mysql_bkup
@@ -311,6 +347,24 @@ spec:
- name: BUCKETNAME
value: ""
- name: S3_ENDPOINT
value: "https://s3.amazonaws.com"
value: "https://s3.us-west-2.amazonaws.com"
restartPolicy: Never
```
```
## Contributing
Contributions are welcome! If you encounter any issues or have suggestions for improvements, please create an issue or submit a pull request.
Make sure to follow the existing coding style and provide tests for your changes.
## License
This project is licensed under the MIT License. See the LICENSE file for details.
## Authors
**Jonas Kaninda**
- <https://github.com/jkaninda>
## Copyright
Copyright (c) [2023] [Jonas Kaninda]

View File

@@ -6,6 +6,9 @@ if [ $# -eq 0 ]
tag=$1
fi
docker build -f src/docker/Dockerfile -t jkaninda/mysql-bkup:$tag .
#go build
CGO_ENABLED=0 GOOS=linux go build
docker compose up -d
docker build -f docker/Dockerfile -t jkaninda/mysql-bkup:$tag .
#docker compose up -d --force-recreate

54
cmd/root.go Normal file
View File

@@ -0,0 +1,54 @@
// Package cmd /*
/*
Copyright © 2024 Jonas Kaninda <jonaskaninda@gmail.com>
*/
package cmd
import (
"os"
"github.com/spf13/cobra"
)
// rootCmd represents the base command when called without any subcommands
// NOTE(review): this cobra command appears to duplicate the pflag-based CLI
// defined in main.go, where the call to cmd.Execute() is commented out —
// confirm which CLI layer is the active one.
var rootCmd = &cobra.Command{
Use: "mysql-bkup",
Short: "MySQL Backup tool, backup database to S3 or Object Storage",
Long: `MySQL Backup and Restoration tool. Backup database to AWS S3 storage or any S3 Alternatives for Object Storage.`,
// Uncomment the following line if your bare application
// has an action associated with it:
// Run: func(cmd *cobra.Command, args []string) { },
}
// Execute runs the root command; it is the single entry point called by
// main.main(). Any error from cobra terminates the process with status 1.
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}
func init() {
// Here you will define your flags and configuration settings.
// Cobra supports persistent flags, which, if defined here,
// will be global for your application.
// rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.mysql-bkup.yaml)")
// Cobra also supports local flags, which will only run
// when this action is called directly.
// NOTE(review): these flags mirror the pflag definitions in main.go's init;
// only one of the two CLI layers appears to be active (main.go comments out
// cmd.Execute()). Keep the two lists in sync — or drop one of them.
rootCmd.PersistentFlags().StringP("operation", "o", "backup", "Set operation")
rootCmd.PersistentFlags().StringP("storage", "s", "local", "Set storage. local or s3")
rootCmd.PersistentFlags().StringP("file", "f", "", "Set file name")
rootCmd.PersistentFlags().StringP("path", "P", "/mysql-bkup", "Set s3 path, without file name")
rootCmd.PersistentFlags().StringP("dbname", "d", "", "Set database name")
rootCmd.PersistentFlags().StringP("mode", "m", "default", "Set execution mode. default or scheduled")
rootCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Set schedule period time")
rootCmd.PersistentFlags().IntP("timeout", "t", 30, "Set timeout")
rootCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression")
rootCmd.PersistentFlags().IntP("port", "p", 3306, "Set database port")
rootCmd.PersistentFlags().BoolP("help", "h", false, "Print this help message")
// NOTE(review): the README documents -V as the version shorthand, but the
// shorthand registered here is "v" — confirm which is intended.
rootCmd.PersistentFlags().BoolP("version", "v", false, "shows version information")
}

48
docker/Dockerfile Normal file
View File

@@ -0,0 +1,48 @@
FROM golang:1.21.0 AS build
WORKDIR /app
# Copy the source code.
COPY . .
# Installs Go dependencies
RUN go mod download
# Build
RUN CGO_ENABLED=0 GOOS=linux go build -o /app/mysql-bkup
FROM ubuntu:24.04
ENV DB_HOST=""
ENV DB_NAME=""
ENV DB_USERNAME=""
ENV DB_PASSWORD=""
ENV DB_PORT="3306"
ENV STORAGE=local
ENV BUCKETNAME=""
ENV ACCESS_KEY=""
ENV SECRET_KEY=""
ENV S3_ENDPOINT=https://s3.amazonaws.com
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v0.4"
LABEL authors="Jonas Kaninda"
RUN apt-get update -qq
#RUN apt-get install build-essential libcurl4-openssl-dev libxml2-dev mime-support -y
RUN apt install s3fs mysql-client supervisor cron -y
# Clear cache
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
RUN mkdir /s3mnt
RUN mkdir /tmp/s3cache
RUN chmod 777 /s3mnt
RUN chmod 777 /tmp/s3cache
COPY --from=build /app/mysql-bkup /usr/local/bin/mysql-bkup
RUN chmod +x /usr/local/bin/mysql-bkup
RUN ln -s /usr/local/bin/mysql-bkup /usr/local/bin/bkup
ADD docker/supervisord.conf /etc/supervisor/supervisord.conf
RUN mkdir /backup
WORKDIR /backup

View File

@@ -0,0 +1,21 @@
version: "3"
services:
mysql-bkup:
image: jkaninda/mysql-bkup
container_name: mysql-bkup
privileged: true
devices:
- "/dev/fuse"
command:
- /bin/sh
- -c
- bkup --operation backup --storage s3 --path /mys3_custom_path --dbname database_name
environment:
- DB_PORT=3306
- DB_HOST=mysqlhost
- DB_USERNAME=userName
- DB_PASSWORD=${DB_PASSWORD}
- ACCESS_KEY=${ACCESS_KEY}
- SECRET_KEY=${SECRET_KEY}
- BUCKETNAME=${BUCKETNAME}
- S3_ENDPOINT=https://s3.us-west-2.amazonaws.com

View File

@@ -0,0 +1,16 @@
version: "3"
services:
mysql-bkup:
image: jkaninda/mysql-bkup
container_name: mysql-bkup
command:
- /bin/sh
- -c
- bkup --operation backup --dbname database_name --mode scheduled --period "0 1 * * *"
volumes:
- ./backup:/backup
environment:
- DB_PORT=3306
- DB_HOST=mysqlhost
- DB_USERNAME=userName
- DB_PASSWORD=${DB_PASSWORD}

View File

@@ -0,0 +1,21 @@
version: "3"
services:
mysql-bkup:
image: jkaninda/mysql-bkup
container_name: mysql-bkup
privileged: true
devices:
- "/dev/fuse"
command:
- /bin/sh
- -c
- bkup --operation backup --storage s3 --path /mys3_custom_path --dbname database_name --mode scheduled --period "0 1 * * *"
environment:
- DB_PORT=3306
- DB_HOST=mysqlhost
- DB_USERNAME=userName
- DB_PASSWORD=${DB_PASSWORD}
- ACCESS_KEY=${ACCESS_KEY}
- SECRET_KEY=${SECRET_KEY}
- BUCKETNAME=${BUCKETNAME}
- S3_ENDPOINT=https://s3.us-west-2.amazonaws.com

View File

@@ -0,0 +1,16 @@
version: "3"
services:
mysql-bkup:
image: jkaninda/mysql-bkup
container_name: mysql-bkup
command:
- /bin/sh
- -c
- bkup --operation backup --dbname database_name
volumes:
- ./backup:/backup
environment:
- DB_PORT=3306
- DB_HOST=mysqlhost
- DB_USERNAME=userName
- DB_PASSWORD=${DB_PASSWORD}

View File

@@ -16,7 +16,7 @@ spec:
command:
- /bin/sh
- -c
- bkup -o backup -s s3 --path /custom_path
- bkup --operation backup --storage s3 --path /custom_path
env:
- name: DB_PORT
value: "3306"
@@ -36,5 +36,5 @@ spec:
- name: BUCKETNAME
value: ""
- name: S3_ENDPOINT
value: "https://s3.amazonaws.com"
value: "https://s3.us-west-2.amazonaws.com"
restartPolicy: Never

10
go.mod Normal file
View File

@@ -0,0 +1,10 @@
module github.com/jkaninda/mysql-bkup
go 1.21.0
require github.com/spf13/pflag v1.0.5
require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/spf13/cobra v1.8.0 // indirect
)

10
go.sum Normal file
View File

@@ -0,0 +1,10 @@
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

222
main.go Normal file
View File

@@ -0,0 +1,222 @@
package main
/*****
* MySQL Backup & Restore
* @author Jonas Kaninda
* @license MIT License <https://opensource.org/licenses/MIT>
* @link https://github.com/jkaninda/mysql-bkup
**/
import (
"fmt"
"github.com/jkaninda/mysql-bkup/pkg"
"github.com/jkaninda/mysql-bkup/utils"
flag "github.com/spf13/pflag"
"os"
"os/exec"
)
// appVersion is read once at startup from the VERSION environment variable
// (set in the Docker image).
var appVersion string = os.Getenv("VERSION")
// s3MountPath is the S3 mount point.
// NOTE(review): unused in this file — pkg/s3fs.go declares its own copy.
const s3MountPath string = "/s3mnt"
// Package-level configuration, populated from command-line flags in init().
// NOTE(review): dbHost, dbPassword, dbUserName, accessKey, secretKey,
// bucketName, s3Endpoint and s3fsPasswdFile are never assigned or read in
// this file (duplicates live in pkg) — candidates for removal.
var (
operation string = "backup"
storage string = "local"
file string = ""
s3Path string = "/mysql-bkup"
dbName string = ""
dbHost string = ""
dbPort string = ""
dbPassword string = ""
dbUserName string = ""
executionMode string = "default"
storagePath string = "/backup"
accessKey string = ""
secretKey string = ""
bucketName string = ""
s3Endpoint string = ""
s3fsPasswdFile string = "/etc/passwd-s3fs"
disableCompression bool = false
startBackup bool = true
timeout int = 30
period string = "0 1 * * *"
)
// init parses the command line (spf13/pflag), copies flag values into the
// package-level configuration variables, and mirrors selected values into
// environment variables consumed by the pkg helpers. --help and --version
// print their output and exit immediately.
func init() {
	var (
		operationFlag          = flag.StringP("operation", "o", "backup", "Operation")
		storageFlag            = flag.StringP("storage", "s", "local", "Storage, local or s3")
		fileFlag               = flag.StringP("file", "f", "", "File name")
		pathFlag               = flag.StringP("path", "P", "/mysql-bkup", "S3 path, without file name")
		dbnameFlag             = flag.StringP("dbname", "d", "", "Database name")
		modeFlag               = flag.StringP("mode", "m", "default", "Execution mode. default or scheduled")
		periodFlag             = flag.StringP("period", "", "0 1 * * *", "Schedule period time")
		timeoutFlag            = flag.IntP("timeout", "t", 30, "Timeout (in seconds) to stop database connection")
		disableCompressionFlag = flag.BoolP("disable-compression", "", false, "Disable backup compression")
		portFlag               = flag.IntP("port", "p", 3306, "Database port")
		helpFlag               = flag.BoolP("help", "h", false, "Print this help message")
		versionFlag            = flag.BoolP("version", "v", false, "Version information")
	)
	flag.Parse()
	operation = *operationFlag
	storage = *storageFlag
	file = *fileFlag
	s3Path = *pathFlag
	dbName = *dbnameFlag
	executionMode = *modeFlag
	dbPort = fmt.Sprint(*portFlag)
	timeout = *timeoutFlag
	period = *periodFlag
	disableCompression = *disableCompressionFlag
	flag.Usage = func() {
		// Help text fixed: the original contained a mangled rename
		// ("MySQL BackupDatabase…", "RestoreDatabase: bkup…") and used a
		// single dash for the long option (-storage).
		fmt.Print("MySQL Backup and Restoration tool. Backup database to AWS S3 storage or any S3 Alternatives for Object Storage.\n\n")
		fmt.Print("Usage: bkup --operation backup --storage s3 --dbname databasename --path /my_path ...\n")
		fmt.Print("       bkup -o backup -d databasename --disable-compression ...\n")
		fmt.Print("       Restore: bkup -o restore -d databasename -f db_20231217_051339.sql.gz ...\n\n")
		flag.PrintDefaults()
	}
	if *helpFlag {
		startBackup = false
		flag.Usage()
		os.Exit(0)
	}
	if *versionFlag {
		startBackup = false
		version()
		os.Exit(0)
	}
	// Mirror flag values into the environment for the pkg helpers. The
	// original silently aborted the remainder of init on a Setenv error;
	// report the failure instead (see setEnv).
	if *dbnameFlag != "" {
		setEnv("DB_NAME", dbName)
	}
	if *pathFlag != "" {
		setEnv("S3_PATH", *pathFlag)
	}
	if *fileFlag != "" {
		setEnv("FILE_NAME", *fileFlag)
	}
	// NOTE(review): DB_PORT is exported only when it differs from the
	// default; the container image is expected to provide DB_PORT=3306.
	if *portFlag != 3306 {
		setEnv("DB_PORT", fmt.Sprint(*portFlag))
	}
	if *periodFlag != "" {
		setEnv("SCHEDULE_PERIOD", *periodFlag)
	}
	if *storageFlag != "" {
		setEnv("STORAGE", *storageFlag)
	}
	storage = os.Getenv("STORAGE")
	setEnv("STORAGE_PATH", storagePath)
}

// setEnv sets an environment variable and reports (rather than silently
// ignores) a failure; os.Setenv errors are effectively impossible on
// supported platforms, so a warning is sufficient.
func setEnv(key, value string) {
	if err := os.Setenv(key, value); err != nil {
		fmt.Fprintf(os.Stderr, "Warning: could not set %s: %v\n", key, err)
	}
}
// version prints the application version (taken from the VERSION environment
// variable at startup). The original also called fmt.Print() with no
// arguments — a no-op — which is removed here.
func version() {
	fmt.Printf("Version: %s \n", appVersion)
}
// main seeds STORAGE_PATH with the local default and starts the requested
// operation, unless --help/--version already short-circuited startup in init
// (startBackup is cleared there before the process exits).
func main() {
	//cmd.Execute()
	if err := os.Setenv("STORAGE_PATH", storagePath); err != nil {
		return
	}
	if startBackup {
		start()
	}
}
// start dispatches on the execution mode: "default" runs a one-shot backup
// or restore against local or S3 storage; "scheduled" installs a cron job;
// anything else is fatal.
func start() {
	switch executionMode {
	case "default":
		useS3 := storage == "s3"
		if operation == "backup" {
			if useS3 {
				utils.Info("BackupDatabase to s3 storage")
				s3Backup()
			} else {
				utils.Info("BackupDatabase to local storage")
				pkg.BackupDatabase(disableCompression)
			}
		} else {
			// Any operation other than "backup" is treated as a restore.
			if useS3 {
				utils.Info("RestoreDatabase from s3")
				s3Restore()
			} else {
				utils.Info("RestoreDatabase from local")
				pkg.RestoreDatabase(file)
			}
		}
	case "scheduled":
		scheduledMode()
	default:
		utils.Fatal("Error, unknown execution mode!")
	}
}
// s3Backup mounts the configured S3 bucket via s3fs (which also points
// STORAGE_PATH into the mount) and then backs up the database into it.
func s3Backup() {
// Backup Database to S3 storage
pkg.MountS3Storage(s3Path)
pkg.BackupDatabase(disableCompression)
}
// Run in scheduled mode
// scheduledMode installs a cron-driven backup job (via pkg.CreateCrontabScript)
// and then hands control to supervisord. Only the backup operation is
// supported in this mode.
func scheduledMode() {
// Verify operation
if operation == "backup" {
fmt.Println()
fmt.Println("**********************************")
fmt.Println(" Starting MySQL Bkup... ")
fmt.Println("***********************************")
utils.Info("Running in Scheduled mode")
utils.Info("Log file in /var/log/mysql-bkup.log")
utils.Info("Execution period ", os.Getenv("SCHEDULE_PERIOD"))
// Test database connection up front, so a bad configuration fails fast
// before the cron job is installed.
utils.TestDatabaseConnection()
utils.Info("Creating backup job...")
pkg.CreateCrontabScript(disableCompression, storage)
// NOTE(review): supervisord presumably keeps cron running in the
// foreground (see /etc/supervisor/supervisord.conf) — confirm.
supervisordCmd := exec.Command("supervisord", "-c", "/etc/supervisor/supervisord.conf")
if err := supervisordCmd.Run(); err != nil {
utils.Fatalf("Error starting supervisord: %v\n", err)
}
} else {
utils.Fatal("Scheduled mode supports only backup operation")
}
}
// s3Restore mounts the configured S3 bucket via s3fs (which also points
// STORAGE_PATH into the mount) and restores the given dump file from it.
func s3Restore() {
// Restore database from S3
pkg.MountS3Storage(s3Path)
pkg.RestoreDatabase(file)
}

103
pkg/backup.go Normal file
View File

@@ -0,0 +1,103 @@
// Package pkg /*
/*
Copyright © 2024 Jonas Kaninda <jonaskaninda.gmail.com>
*/
package pkg
import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils"
"log"
"os"
"os/exec"
"time"
)
// Connection settings shared by the backup/restore helpers. Each entry is
// refreshed from the corresponding DB_* / STORAGE_PATH environment variable
// on every call to BackupDatabase or RestoreDatabase.
var (
dbName = ""
dbHost = ""
dbPort = ""
dbPassword = ""
dbUserName = ""
storagePath = "/backup"
)
// BackupDatabase dumps the database named by the DB_* environment variables
// into STORAGE_PATH (optionally gzip-compressed) and appends the resulting
// file name to STORAGE_PATH/history.txt. Any failure terminates the process.
//
// Fixes vs. the original: in the compressed path the os.Create error was
// checked only AFTER gzipCmd.Start(), and the output file was never closed;
// a stray debug line (utils.Info("Mysql")) is removed.
func BackupDatabase(disableCompression bool) {
	dbHost = os.Getenv("DB_HOST")
	dbPassword = os.Getenv("DB_PASSWORD")
	dbUserName = os.Getenv("DB_USERNAME")
	dbName = os.Getenv("DB_NAME")
	dbPort = os.Getenv("DB_PORT")
	storagePath = os.Getenv("STORAGE_PATH")
	if dbHost == "" || dbName == "" || dbUserName == "" || dbPassword == "" {
		utils.Fatal("Please make sure all required environment variables for database are set")
		return
	}
	utils.TestDatabaseConnection()
	// Backup Database database
	utils.Info("Backing up database...")
	timestamp := time.Now().Format("20060102_150405")
	if disableCompression {
		dumpPlain(fmt.Sprintf("%s_%s.sql", dbName, timestamp))
	} else {
		dumpCompressed(fmt.Sprintf("%s_%s.sql.gz", dbName, timestamp))
	}
}

// dumpPlain runs mysqldump and writes the raw SQL output to
// storagePath/bkFileName, then records the file in history.
func dumpPlain(bkFileName string) {
	cmd := exec.Command("mysqldump",
		"-h", dbHost,
		"-P", dbPort,
		"-u", dbUserName,
		"--password="+dbPassword,
		dbName,
	)
	output, err := cmd.Output()
	if err != nil {
		log.Fatal(err)
	}
	file, err := os.Create(fmt.Sprintf("%s/%s", storagePath, bkFileName))
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	if _, err = file.Write(output); err != nil {
		log.Fatal(err)
	}
	utils.Info("Database has been backed up")
	appendHistory(bkFileName)
}

// dumpCompressed pipes mysqldump through an external gzip process into
// storagePath/bkFileName, then records the file in history.
func dumpCompressed(bkFileName string) {
	cmd := exec.Command("mysqldump", "-h", dbHost, "-P", dbPort, "-u", dbUserName, "--password="+dbPassword, dbName)
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	// Create the destination BEFORE starting gzip so a create failure is
	// caught up front, and make sure the file is closed.
	out, err := os.Create(fmt.Sprintf("%s/%s", storagePath, bkFileName))
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()
	gzipCmd := exec.Command("gzip")
	gzipCmd.Stdin = stdout
	gzipCmd.Stdout = out
	if err := gzipCmd.Start(); err != nil {
		log.Fatal(err)
	}
	if err := cmd.Run(); err != nil {
		log.Fatal(err)
	}
	if err := gzipCmd.Wait(); err != nil {
		log.Fatal(err)
	}
	utils.Info("Database has been backed up")
	appendHistory(bkFileName)
}

// appendHistory appends the backup file name to STORAGE_PATH/history.txt,
// creating the file on first use.
func appendHistory(bkFileName string) {
	historyFile, err := os.OpenFile(fmt.Sprintf("%s/history.txt", storagePath), os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		log.Fatal(err)
	}
	defer historyFile.Close()
	if _, err := historyFile.WriteString(bkFileName + "\n"); err != nil {
		log.Fatal(err)
	}
}

56
pkg/restore.go Normal file
View File

@@ -0,0 +1,56 @@
package pkg
import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils"
"os"
"os/exec"
"path/filepath"
)
// RestoreDatabase restore database
// RestoreDatabase streams a previously created dump file (plain .sql or
// gzip-compressed .sql.gz) from STORAGE_PATH into mysql, using the DB_*
// environment variables for the connection. Any failure terminates the
// process.
//
// Fix vs. the original: the .gz branch called utils.Fatal without the
// underlying error (the .sql branch included it); both paths now share one
// pipeline and report the error consistently.
func RestoreDatabase(file string) {
	dbHost = os.Getenv("DB_HOST")
	dbPassword = os.Getenv("DB_PASSWORD")
	dbUserName = os.Getenv("DB_USERNAME")
	dbName = os.Getenv("DB_NAME")
	dbPort = os.Getenv("DB_PORT")
	storagePath = os.Getenv("STORAGE_PATH")
	if dbHost == "" || dbName == "" || dbUserName == "" || dbPassword == "" || file == "" {
		utils.Fatal("Please make sure all required environment variables are set")
		return
	}
	filePath := fmt.Sprintf("%s/%s", storagePath, file)
	if !utils.FileExists(filePath) {
		utils.Fatal("File not found in ", filePath)
		return
	}
	utils.TestDatabaseConnection()
	// Choose the reader by extension: zcat decompresses .gz dumps, cat
	// streams plain .sql dumps.
	extension := filepath.Ext(filePath)
	var reader string
	switch extension {
	case ".gz":
		reader = "zcat"
	case ".sql":
		reader = "cat"
	default:
		utils.Fatal("Unknown file extension ", extension)
		return
	}
	// NOTE(review): connection values are interpolated into a bash command
	// line. They come from operator-controlled environment variables, but
	// proper quoting/escaping (or piping via exec with Stdin) would be safer.
	str := reader + " " + filePath + " | mysql -h " + os.Getenv("DB_HOST") + " -P " + os.Getenv("DB_PORT") + " -u " + os.Getenv("DB_USERNAME") + " --password=" + os.Getenv("DB_PASSWORD") + " " + os.Getenv("DB_NAME")
	if _, err := exec.Command("bash", "-c", str).Output(); err != nil {
		utils.Fatal("Error, in restoring the database", err)
	}
	utils.Info("Database has been restored")
}

74
pkg/s3fs.go Normal file
View File

@@ -0,0 +1,74 @@
// Package pkg /*
/*
Copyright © 2024 Jonas Kaninda <jonaskaninda.gmail.com>
*/
package pkg
import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils"
"os"
"os/exec"
)
// s3MountPath is the mount point for the s3fs-backed bucket.
const s3MountPath string = "/s3mnt"
// s3fsPasswdFile holds the "ACCESS_KEY:SECRET_KEY" credentials for s3fs.
const s3fsPasswdFile string = "/etc/passwd-s3fs"
// S3 credentials and endpoint, refreshed from the environment on every call
// to MountS3Storage.
var (
accessKey = ""
secretKey = ""
bucketName = ""
s3Endpoint = ""
)
// MountS3Storage Mount s3 storage using s3fs
// MountS3Storage mounts the bucket named by BUCKETNAME at s3MountPath via
// s3fs (credentials from ACCESS_KEY/SECRET_KEY, endpoint from S3_ENDPOINT),
// points STORAGE_PATH at s3MountPath+s3Path and creates that directory.
//
// Fixes vs. the original: the WriteToFile failure message now includes the
// underlying error, and the duplicated MkdirAll call (which used Fatalf in
// one branch and Fatal in the other) is unified.
func MountS3Storage(s3Path string) {
	accessKey = os.Getenv("ACCESS_KEY")
	secretKey = os.Getenv("SECRET_KEY")
	bucketName = os.Getenv("BUCKETNAME")
	s3Endpoint = os.Getenv("S3_ENDPOINT")
	if accessKey == "" || secretKey == "" || bucketName == "" {
		utils.Fatal("Please make sure all environment variables are set")
		return
	}
	storagePath := fmt.Sprintf("%s%s", s3MountPath, s3Path)
	if err := os.Setenv("STORAGE_PATH", storagePath); err != nil {
		return
	}
	// s3fs reads credentials from a password file in "ACCESS:SECRET" form;
	// it must not be world-readable or s3fs refuses to use it.
	if err := utils.WriteToFile(s3fsPasswdFile, fmt.Sprintf("%s:%s", accessKey, secretKey)); err != nil {
		utils.Fatal("Error creating file ", err)
	}
	utils.ChangePermission(s3fsPasswdFile, 0600)
	utils.Info("Mounting Object storage in", s3MountPath)
	// An empty mount point means the bucket is not mounted yet.
	if isEmpty, _ := utils.IsDirEmpty(s3MountPath); isEmpty {
		cmd := exec.Command("s3fs", bucketName, s3MountPath,
			"-o", "passwd_file="+s3fsPasswdFile,
			"-o", "use_cache=/tmp/s3cache",
			"-o", "allow_other",
			"-o", "url="+s3Endpoint,
			"-o", "use_path_request_style",
		)
		if err := cmd.Run(); err != nil {
			utils.Fatal("Error mounting Object storage:", err)
		}
	} else {
		utils.Info("Object storage already mounted in " + s3MountPath)
	}
	// Ensure the backup directory exists inside the mounted bucket.
	if err := os.MkdirAll(storagePath, os.ModePerm); err != nil {
		utils.Fatalf("Error creating directory %v %v", storagePath, err)
	}
}

76
pkg/scripts.go Normal file
View File

@@ -0,0 +1,76 @@
package pkg
// Package pkg /*
/*
Copyright © 2024 Jonas Kaninda <jonaskaninda.gmail.com>
*/
import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils"
"os"
"os/exec"
)
// cronLogFile receives the scheduled job's output.
const cronLogFile = "/var/log/mysql-bkup.log"
// backupCronFile is the generated shell script the cron job executes.
const backupCronFile = "/usr/local/bin/backup_cron.sh"
// CreateCrontabScript writes a backup shell script, registers it with cron
// using the SCHEDULE_PERIOD environment variable, and prepares everything
// scheduled mode needs. Every failure terminates the process (utils.Fatal*).
func CreateCrontabScript(disableCompression bool, storage string) {
//task := "/usr/local/bin/backup_cron.sh"
// NOTE(review): this touch looks redundant if utils.WriteToFile (below)
// creates the file itself — confirm and drop one of the two.
touchCmd := exec.Command("touch", backupCronFile)
if err := touchCmd.Run(); err != nil {
utils.Fatalf("Error creating file %s: %v\n", backupCronFile, err)
}
var disableC = ""
if disableCompression {
disableC = "--disable-compression"
}
// Build the script body; the s3 variant also forwards the S3 path.
var scriptContent string
if storage == "s3" {
scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
set -e
bkup --operation backup --dbname %s --port %s --storage s3 --path %s %v
`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), os.Getenv("S3_PATH"), disableC)
} else {
scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
set -e
bkup --operation backup --dbname %s --port %s %v
`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), disableC)
}
if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil {
utils.Fatalf("Error writing to %s: %v\n", backupCronFile, err)
}
chmodCmd := exec.Command("chmod", "+x", "/usr/local/bin/backup_cron.sh")
if err := chmodCmd.Run(); err != nil {
utils.Fatalf("Error changing permissions of %s: %v\n", backupCronFile, err)
}
// NOTE(review): plain `ln -s` fails if the link already exists, so running
// scheduled mode twice in the same container would abort here — confirm
// whether `ln -sf` is intended.
lnCmd := exec.Command("ln", "-s", "/usr/local/bin/backup_cron.sh", "/usr/local/bin/backup_cron")
if err := lnCmd.Run(); err != nil {
utils.Fatalf("Error creating symbolic link: %v\n", err)
}
cronJob := "/etc/cron.d/backup_cron"
touchCronCmd := exec.Command("touch", cronJob)
if err := touchCronCmd.Run(); err != nil {
utils.Fatalf("Error creating file %s: %v\n", cronJob, err)
}
// NOTE(review): this entry uses /etc/cron.d syntax (with a "root" user
// field) but is installed below via the `crontab` command, which expects
// entries WITHOUT a user field — verify which cron mechanism is intended.
cronContent := fmt.Sprintf(`%s root exec /bin/bash -c ". /run/supervisord.env; /usr/local/bin/backup_cron.sh >> %s"
`, os.Getenv("SCHEDULE_PERIOD"), cronLogFile)
if err := utils.WriteToFile(cronJob, cronContent); err != nil {
utils.Fatalf("Error writing to %s: %v\n", cronJob, err)
}
utils.ChangePermission("/etc/cron.d/backup_cron", 0644)
crontabCmd := exec.Command("crontab", "/etc/cron.d/backup_cron")
if err := crontabCmd.Run(); err != nil {
utils.Fatal("Error updating crontab: ", err)
}
utils.Info("Starting backup in scheduled mode")
}

View File

@@ -5,4 +5,4 @@ DB_HOST='db_hostname'
DB_NAME='db_name'
BACKUP_DIR="$PWD/backup"
docker run --rm --name mysql-bkup -v $BACKUP_DIR:/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/mysql-bkup:latest bkup -o backup -db $DB_NAME
docker run --rm --name mysql-bkup -v $BACKUP_DIR:/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/mysql-bkup:latest bkup -o backup -d $DB_NAME

View File

@@ -1,36 +0,0 @@
FROM ubuntu:24.04
ENV DB_HOST=""
ENV DB_NAME=""
ENV DB_USERNAME=""
ENV DB_PASSWORD=""
ENV DB_PORT="3306"
ENV STORAGE=local
ENV BUCKETNAME=""
ENV ACCESS_KEY=""
ENV SECRET_KEY=""
ENV S3_ENDPOINT=https://s3.amazonaws.com
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v0.3"
RUN apt-get update -qq
RUN apt-get install build-essential libcurl4-openssl-dev libxml2-dev mime-support -y
RUN apt install s3fs mysql-client supervisor cron -y
# Clear cache
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
RUN mkdir /s3mnt
RUN mkdir /tmp/s3cache
RUN chmod 777 /s3mnt
RUN chmod 777 /tmp/s3cache
COPY src/mysql_bkup.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/mysql_bkup.sh
ADD src/supervisord.conf /etc/supervisor/supervisord.conf
RUN ln -s /usr/local/bin/mysql_bkup.sh /usr/local/bin/mysql_bkup
RUN ln -s /usr/local/bin/mysql_bkup.sh /usr/local/bin/bkup
RUN mkdir /backup
WORKDIR /backup

View File

@@ -1,272 +0,0 @@
#!/usr/bin/env bash
# MySQL backup/restore entrypoint (shell implementation).
# Defaults below may be overridden by environment variables already set
# in the container and by the command-line flags parsed in flags().
set -e
# Timestamp used to build unique dump file names.
TIME=$(date +%Y%m%d_%H%M%S)
MY_SQL_DUMP=/usr/bin/mysqldump
# arg0: program name without the .sh suffix (used in messages);
# blnk: same-width blank string used to align continuation lines in usage text.
arg0=$(basename "$0" .sh)
blnk=$(echo "$arg0" | sed 's/./ /g')
export OPERATION=backup
export STORAGE=local
export STORAGE_PATH=/backup
export S3_PATH=/mysql-bkup
export TIMEOUT=60
export EXECUTION_MODE="default"
export SCHEDULE_PERIOD="0 1 * * *"
# NOTE(review): "COMPRESION" is a typo, and nothing in this script reads
# the variable — confirm no external consumer before renaming/removing.
export FILE_COMPRESION=true
usage_info()
{
    # Short usage summary; $blnk keeps continuation lines aligned.
    # Fixed: the backup example used "-d s3" (-d is --dbname), which would
    # set the database name to "s3"; the storage flag is -s, as in the
    # restore example below.
    echo "Usage: \\"
    echo " $blnk Backup: mysql_bkup -o backup -s s3 \\"
    echo " $blnk Restore: mysql_bkup -o restore -s s3 -f my_db.sql \\"
    echo " $blnk [-o|--operation] [{-f|--file} ] [{-s|--storage} ] [{-h|--help} ] \\"
}
version_info()
{
    # Print the image version (baked in via the VERSION env var) and stop.
    printf 'Version: %s\n' "$VERSION"
    exit 0
}
usage()
{
    # Fixed: "exec 1>2" redirected stdout to a *file named 2*; the intent
    # was to send the usage text to standard error.
    exec 1>&2
    usage_info
    # Fixed: usage() is only reached on an unknown flag, so signal failure
    # instead of exiting 0.
    exit 1
}
error()
{
    # Report a flag-parsing error on stderr and abort.
    echo "$arg0: $*" >&2
    # Fixed: an error handler must not exit 0 — callers (and cron) would
    # see the run as successful.
    exit 1
}
help()
{
    # Full flag reference.
    echo
    echo "  -o |--operation   -- Set operation (default: backup)"
    echo "  -s |--storage     -- Set storage (default: local)"
    echo "  -f |--file        -- Set file name "
    echo "     |--path        -- Set s3 path, without file name"
    echo "  -d |--dbname      -- Set database name "
    echo "  -p |--port        -- Set database port (default: 3306)"
    echo "  -m |--mode        -- Set execution mode (default: default)"
    echo "     |--period      -- Set schedule period time (default: '0 1 * * *')"
    # Fixed: the script initializes TIMEOUT=60, not 120.
    echo "  -t |--timeout     -- Set timeout (default: 60s)"
    echo "  -h |--help        -- Print this help message and exit"
    echo "  -V |--version     -- Print version information and exit"
    exit 0
}
flags()
{
    # Parse command-line flags. Every value is exported so child processes
    # (the generated cron script, supervisord) inherit the configuration.
    while test $# -gt 0
    do
        case "$1" in
        (-o|--operation)
            shift
            [ $# = 0 ] && error "No operation specified - restore or backup"
            export OPERATION="$1"
            shift;;
        (-d|--dbname)
            shift
            [ $# = 0 ] && error "No database name specified"
            export DB_NAME="$1"
            shift;;
        (-s|--storage)
            shift
            [ $# = 0 ] && error "No storage specified - local or s3 | default local"
            export STORAGE="$1"
            shift;;
        (-f|--file)
            shift
            [ $# = 0 ] && error "No file specified - file to restore"
            export FILE_NAME="$1"
            shift;;
        (--path)
            shift
            [ $# = 0 ] && error "No s3 path specified - s3 path without file name"
            export S3_PATH="$1"
            shift;;
        (-db|--database)
            shift
            [ $# = 0 ] && error "No database name specified"
            export DB_NAME="$1"
            shift;;
        (-p|--port)
            shift
            # Fixed copy-paste bug: message previously said "No database
            # name specified" for the port flag.
            [ $# = 0 ] && error "No database port specified"
            export DB_PORT="$1"
            shift;;
        (-m|--mode)
            shift
            [ $# = 0 ] && error "No execution mode specified"
            export EXECUTION_MODE="$1"
            shift;;
        (--period)
            shift
            [ $# = 0 ] && error "No schedule period entered"
            export SCHEDULE_PERIOD="$1"
            shift;;
        (-t|--timeout)
            shift
            [ $# = 0 ] && error "No timeout specified"
            export TIMEOUT="$1"
            shift;;
        (-h|--help)
            help;;
        (-V|--version)
            version_info;;
        (--)
            help;;
        (*) usage;;
        esac
    done
}
backup()
{
    # Dump ${DB_NAME} as a gzipped SQL file into ${STORAGE_PATH} and record
    # it in history.txt. Terminates the script in every case.
    if [[ -z $DB_HOST ]] || [[ -z $DB_NAME ]] || [[ -z $DB_USERNAME ]] || [[ -z $DB_PASSWORD ]]; then
        echo "Please make sure all required environment variables are set "
        # Fixed: missing configuration used to exit 0, masking the failure.
        exit 1
    fi
    ## Test database connection before dumping
    mysql -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USERNAME}" --password="${DB_PASSWORD}" "${DB_NAME}" -e"quit"
    ## Backup database (quoted expansions tolerate spaces/special chars)
    mysqldump -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USERNAME}" --password="${DB_PASSWORD}" "${DB_NAME}" | gzip > "${STORAGE_PATH}/${DB_NAME}_${TIME}.sql.gz"
    echo "$TIME: ${DB_NAME}_${TIME}.sql.gz" | tee -a "${STORAGE_PATH}/history.txt"
    echo "Database has been saved"
    exit 0
}
restore()
{
    # Restore a dump (plain SQL or gzip) from ${STORAGE_PATH}/${FILE_NAME}.
    # Terminates the script in every case.
    if [[ -z $DB_HOST ]] || [[ -z $DB_NAME ]] || [[ -z $DB_USERNAME ]] || [[ -z $DB_PASSWORD ]]; then
        echo "Please make sure all required environment variables are set "
        # Fixed: missing configuration used to fall through to a bare
        # "exit" (status 0), masking the failure.
        exit 1
    fi
    if [ ! -f "${STORAGE_PATH}/${FILE_NAME}" ]; then
        echo "Error, file not found in ${STORAGE_PATH}/${FILE_NAME}"
        # Fixed: file-not-found used to exit 0 as well.
        exit 1
    fi
    # gzip -t succeeds only for valid gzip data, so it doubles as a
    # compressed-vs-plain detector.
    if gzip -t "${STORAGE_PATH}/${FILE_NAME}"; then
        zcat "${STORAGE_PATH}/${FILE_NAME}" | mysql -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USERNAME}" --password="${DB_PASSWORD}" "${DB_NAME}"
    else
        mysql -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USERNAME}" --password="${DB_PASSWORD}" "${DB_NAME}" < "${STORAGE_PATH}/${FILE_NAME}"
    fi
    echo "Database has been restored"
    exit 0
}
s3_backup()
{
    # Mount the bucket first so backup() writes into /s3mnt$S3_PATH.
    mount_s3
    backup
}
s3_restore()
{
    # Mount the bucket first so restore() reads from /s3mnt$S3_PATH.
    mount_s3
    restore
}
mount_s3()
{
    # Mount the S3 bucket at /s3mnt via s3fs and point STORAGE_PATH at it.
    # Fixed: BUCKETNAME was listed in the error message but never checked.
    if [[ -z $BUCKETNAME ]] || [[ -z $ACCESS_KEY ]] || [[ -z $SECRET_KEY ]]; then
        echo "Please make sure all environment variables are set "
        # Fixed: "$nACCESS_KEY" typo expanded an undefined variable, and
        # echo without -e printed the \n sequences literally.
        printf 'BUCKETNAME=%s\nACCESS_KEY=%s\nSECRET_KEY=%s\n' "$BUCKETNAME" "$ACCESS_KEY" "$SECRET_KEY"
        # Fixed: falling through silently left STORAGE_PATH=/backup, so an
        # "s3" run would quietly use local storage.
        exit 1
    fi
    # Fixed: credentials were piped through tee, echoing the secret key to
    # stdout (and into any captured log). Write the file directly instead.
    echo "$ACCESS_KEY:$SECRET_KEY" > /etc/passwd-s3fs
    chmod 600 /etc/passwd-s3fs
    echo "Mounting Object storage in /s3mnt .... "
    if [ -z "$(ls -A /s3mnt)" ]; then
        s3fs "$BUCKETNAME" /s3mnt -o passwd_file=/etc/passwd-s3fs -o use_cache=/tmp/s3cache -o allow_other -o url="$S3_ENDPOINT" -o use_path_request_style
        # mkdir -p is a no-op when the target path already exists.
        mkdir -p "/s3mnt$S3_PATH"
    else
        echo "Object storage already mounted in /s3mnt"
    fi
    export STORAGE_PATH="/s3mnt$S3_PATH"
}
create_crontab_script()
{
    # Generate the script cron will invoke. Flag values ($DB_NAME, $DB_PORT,
    # $S3_PATH) are expanded *now*, so the scheduled job reuses the
    # configuration given at container startup.
    TASK=/usr/local/bin/backup_cron.sh
    if [ "$STORAGE" == 's3' ]
    then
        cat > "$TASK" <<EOF
#!/usr/bin/env bash
set -e
bkup --operation backup --dbname $DB_NAME --port $DB_PORT --storage s3 --path $S3_PATH
EOF
    else
        cat > "$TASK" <<EOF
#!/usr/bin/env bash
set -e
bkup --operation backup --dbname $DB_NAME --port $DB_PORT
EOF
    fi
    chmod +x /usr/local/bin/backup_cron.sh
    # Fixed: plain "ln -s" fails if the link already exists (e.g. on a
    # container restart); -sf replaces it idempotently.
    ln -sf /usr/local/bin/backup_cron.sh /usr/local/bin/backup_cron
    ## Create crontab job ("cat >" creates the file; the old "touch" was redundant)
    CRON_JOB=/etc/cron.d/backup_cron
    cat > "$CRON_JOB" <<EOF
$SCHEDULE_PERIOD root exec /bin/bash -c ". /run/supervisord.env; /usr/local/bin/backup_cron.sh >> /var/log/mysql-bkup.log"
EOF
    chmod 0644 /etc/cron.d/*
    crontab /etc/cron.d/backup_cron
}
scheduled_mode()
{
    # Install the cron job and hand control to supervisord, which keeps the
    # container alive and runs cron. Only backups may be scheduled.
    if [ "$OPERATION" != 'backup' ]
    then
        echo "Scheduled mode supports only backup operation"
        exit 1
    fi
    create_crontab_script
    echo ""
    echo "**********************************"
    echo " Starting MySQL Bkup... "
    echo "***********************************"
    echo "Running in Scheduled mode"
    echo "Log file in /var/log/mysql-bkup.log"
    echo "Execution period $SCHEDULE_PERIOD"
    supervisord -c /etc/supervisor/supervisord.conf
}
flags "$@"
# Dispatch on execution mode, then operation, then storage backend.
# (All variables are quoted — unquoted tests break under set -e when a
# value is empty or contains whitespace.)
if [ "$EXECUTION_MODE" == 'default' ]
then
    if [ "$OPERATION" != 'backup' ]
    then
        if [ "$STORAGE" != 's3' ]
        then
            echo "Restore from local"
            restore
        else
            echo "Restore from s3"
            s3_restore
        fi
    else
        if [ "$STORAGE" != 's3' ]
        then
            echo "Backup to local destination"
            backup
        else
            echo "Backup to s3 storage"
            s3_backup
        fi
    fi
elif [ "$EXECUTION_MODE" == 'scheduled' ]
then
    scheduled_mode
else
    # Fixed typo in the user-facing message: "unknow" -> "unknown".
    echo "Error, unknown execution mode!"
    exit 1
fi

86
utils/utils.go Normal file
View File

@@ -0,0 +1,86 @@
package utils
/*****
* MySQL Backup & Restore
* @author Jonas Kaninda
* @license MIT License <https://opensource.org/licenses/MIT>
* @link https://github.com/jkaninda/mysql-bkup
**/
import (
	"fmt"
	"io"
	"io/fs"
	"os"
	"os/exec"
)
// Info writes an informational message to stdout with an "[INFO]" prefix.
func Info(v ...any) {
	msg := fmt.Sprint(v...)
	fmt.Println("[INFO] ", msg)
}
// Infof writes a formatted informational message to stdout with an
// "[INFO]" prefix. The caller supplies any trailing newline in msg.
func Infof(msg string, v ...any) {
	format := "[INFO] " + msg
	fmt.Printf(format, v...)
}
// Warning writes a warning message to stdout with a "[WARNING]" prefix.
func Warning(message string) {
	fmt.Println("[WARNING]", message)
}
// Warningf writes a formatted warning message to stdout with a
// "[WARNING]" prefix. The caller supplies any trailing newline in msg.
func Warningf(msg string, v ...any) {
	format := "[WARNING] " + msg
	fmt.Printf(format, v...)
}
// Fatal writes an error message to stdout with an "[ERROR]" prefix and
// terminates the process with exit code 1. It never returns.
func Fatal(v ...any) {
	msg := fmt.Sprint(v...)
	fmt.Println("[ERROR] ", msg)
	os.Exit(1)
}
// Fatalf writes a formatted error message to stdout with an "[ERROR]"
// prefix and terminates the process with exit code 1. It never returns.
func Fatalf(msg string, v ...any) {
	format := "[ERROR] " + msg
	fmt.Printf(format, v...)
	os.Exit(1)
}
// FileExists reports whether filename exists and is a regular file
// (directories return false).
//
// Fixed: the original only special-cased os.IsNotExist and then called
// info.IsDir() unconditionally — any other Stat error (e.g. permission
// denied, invalid path component) left info nil and caused a nil-pointer
// panic. Now any Stat error is treated as "does not exist".
func FileExists(filename string) bool {
	info, err := os.Stat(filename)
	if err != nil {
		return false
	}
	return !info.IsDir()
}
// WriteToFile writes content to filePath, creating the file if necessary
// and truncating it otherwise. It returns any error from creating or
// writing the file.
//
// Fixed: the hand-rolled Create/WriteString/deferred-Close version
// silently discarded the Close error, which on many filesystems is where
// a short write surfaces. os.WriteFile reports create, write, and close
// errors, and uses the same 0666 permission bits (before umask) that
// os.Create did.
func WriteToFile(filePath, content string) error {
	return os.WriteFile(filePath, []byte(content), 0666)
}
// ChangePermission sets the permission bits of filePath to mod
// (e.g. 0644). On failure it terminates the process via Fatalf.
func ChangePermission(filePath string, mod int) {
	err := os.Chmod(filePath, fs.FileMode(mod))
	if err != nil {
		Fatalf("Error changing permissions of %s: %v\n", filePath, err)
	}
}
// IsDirEmpty reports whether the directory name contains no entries.
//
// Fixed: the original treated *any* non-nil Readdirnames error as "empty"
// and returned a nil error, silently swallowing real failures (e.g. name
// is not a directory, or an I/O error mid-read). Only io.EOF — the
// documented "no more entries" sentinel — means empty; every other error
// is now propagated to the caller.
func IsDirEmpty(name string) (bool, error) {
	f, err := os.Open(name)
	if err != nil {
		return false, err
	}
	defer f.Close()

	// Ask for a single entry: io.EOF means the directory is empty.
	if _, err = f.Readdirnames(1); err == io.EOF {
		return true, nil
	}
	if err != nil {
		return false, err
	}
	return false, nil
}
// TestDatabaseConnection verifies the database is reachable by running a
// no-op "quit" statement through the mysql command-line client, using the
// DB_HOST/DB_PORT/DB_USERNAME/DB_PASSWORD/DB_NAME environment variables.
// On failure it terminates the process via Fatal.
func TestDatabaseConnection() {
	Info("Testing database connection...")
	args := []string{
		"-h", os.Getenv("DB_HOST"),
		"-P", os.Getenv("DB_PORT"),
		"-u", os.Getenv("DB_USERNAME"),
		"--password=" + os.Getenv("DB_PASSWORD"),
		os.Getenv("DB_NAME"),
		"-e", "quit",
	}
	if err := exec.Command("mysql", args...).Run(); err != nil {
		Fatal("Error testing database connection:", err)
	}
}