Compare commits


20 Commits
v0.1.1 ... main

Author     SHA1        Date                        Message
IamTheFij  4b16dea34e  2024-01-23 12:26:10 -08:00  Log when attempting to push metrics to gateway  [drone push: passing; tag: passing]
IamTheFij  7b313b8f9b  2024-01-22 08:50:49 -08:00  Move log line to the proper location  [drone push: passing]
IamTheFij  bea338c27a  2024-01-22 08:49:46 -08:00  Add log when tasks are stopped  [drone push: passing]
IamTheFij  1d0d6b3fe6  2024-01-06 15:34:02 -08:00  More loosly pin tzdata  [drone push: passing; tag: passing]
IamTheFij  985572d737  2024-01-06 15:12:27 -08:00  Pin pre-commit plugin to specific sha  [drone push: failing; tag: failing]
IamTheFij  cff06cd1c6  2024-01-06 15:10:29 -08:00  Update linters and fumpt all files  [drone push: killed]
IamTheFij  90cd0ec9e0  2024-01-06 14:55:44 -08:00  Update versions to fix golangci-lint on drone  [drone push: failing]
IamTheFij  a0db27be1a  2024-01-06 14:29:14 -08:00  Add ability to unlock repos that may have stale locks  [drone push: failing; tag: failing]
            Defaults to remove all locks, even non-stale
IamTheFij  cddc290ee0  2024-01-06 14:28:52 -08:00  Fix exhaustive structs
IamTheFij  d049228980  2023-11-07 11:03:18 -08:00  Fix index out of range when reading snapshots list  [drone push: killed; tag: passing]
IamTheFij  390074e048  2023-11-06 15:02:07 -08:00  Add insecure-tls global option  [drone push: passing; tag: passing]
IamTheFij  e0542a68e5  2023-10-25 20:17:18 -07:00  Add docker healthcheck checking default address  [drone push: passing]
IamTheFij  4ddcea9f7d  2023-10-25 20:16:26 -07:00  Bump to alpine 3.18 and bump versions  [drone push: passing]
            Also, more loosely pinning some dependencies with stable apis.
IamTheFij  a2823e09ad  2023-08-11 05:53:17 -07:00  Add tzdata  [drone push: passing]
            Allows setting container timezone using TZ env variable
IamTheFij  b1fe2537e2  2023-08-02 15:58:41 -07:00  Add postgres support for backup and restore  [drone push: passing; tag: passing]
IamTheFij  f3ecabf4fe  2023-08-02 15:56:06 -07:00  Fix mysql test  [drone push: passing; tag: passing]
IamTheFij  4c7baa46a7  2023-08-02 15:41:15 -07:00  Ignore binary  [drone push: passing]
IamTheFij  1d6957f45f  2023-08-02 15:41:15 -07:00  Fix mysql restoration
            Password and database weren't passed to the mysql command
IamTheFij  0de267a4cf  2023-08-02 14:58:18 -07:00  Disable depguard
IamTheFij  28f081c8d0  2023-08-02 14:55:14 -07:00  Add integration testing to verify backup and restoration  [drone push: passing]
            Including databases
21 changed files with 546 additions and 69 deletions

View File

@@ -4,7 +4,7 @@ name: test
 steps:
   - name: test
-    image: golang:1.20
+    image: golang:1.21
     environment:
       VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
     commands:
@@ -13,7 +13,7 @@ steps:
       - make test

   - name: check
-    image: iamthefij/drone-pre-commit:personal
+    image: iamthefij/drone-pre-commit@sha256:30fa17489b86d7a4c3ad9c3ce2e152c25d82b8671e5609d322c6cae0baed89cd

 ---
 kind: pipeline
@@ -32,7 +32,7 @@ trigger:
 steps:
   - name: build all binaries
-    image: golang:1.17
+    image: golang:1.21
     environment:
       VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
     commands:

5
.gitignore vendored
View File

@@ -20,4 +20,9 @@ dist/
 # Built executable
 restic-scheduler
+resticscheduler

 data/
+
+# Itest temp dirs
+itest/data
+itest/repo

View File

@@ -16,7 +16,6 @@ linters:
     - contextcheck
     - cyclop
     - decorder
-    - depguard
     - dupl
     - durationcheck
     - errchkjson
@@ -32,10 +31,8 @@ linters:
     - gocognit
     - goconst
     - gocritic
-    # - gocyclo # Using cyclop
     - godot
-    # - goerr113 # Using errorlint
-    - gofmt
+    - gofumpt
     - goheader
     - goimports
     - gomnd
@@ -45,7 +42,6 @@ linters:
     - gosec
     - grouper
     - importas
-    # - ireturn
     - lll
     - maintidx
     - makezero
@@ -60,11 +56,9 @@ linters:
     - paralleltest
     - prealloc
     - predeclared
-    # - promlinter # Not common enough
     - revive
     - rowserrcheck
     - sqlclosecheck
-    # - stylecheck # Using revive
     - tagliatelle
     - tenv
     - testpackage
@@ -72,27 +66,11 @@ linters:
     - tparallel
     - unconvert
     - unparam
-    - varnamelen
     - wastedassign
     - whitespace
     - wrapcheck
     - wsl
-  disable:
-    - gochecknoglobals
-    - godox
-    - forbidigo
-    # Deprecated
-    - golint
-    - interfacer
-    - maligned
-    - scopelint
-    - ifshort
-    - varcheck
-    - structcheck
-    - deadcode
-    - exhaustivestruct

 linters-settings:
   gomnd:
     settings:

View File

@@ -1,7 +1,7 @@
 ---
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.4.0
+    rev: v4.5.0
     hooks:
       - id: check-added-large-files
       - id: check-yaml
@@ -11,10 +11,8 @@ repos:
       - id: end-of-file-fixer
       - id: check-merge-conflict
   - repo: https://github.com/dnephin/pre-commit-golang
-    rev: v0.4.0
+    rev: v0.5.1
     hooks:
-      - id: go-fmt
-      - id: go-imports
       - id: golangci-lint
         args:
           - --timeout=3m

View File

@ -1,19 +1,23 @@
FROM alpine:3.17 FROM alpine:3.18
RUN apk add --no-cache \ RUN apk add --no-cache \
bash~=5 \ bash~=5 \
consul~=1.14 \ consul~=1 \
nomad~=1.4 \ mariadb-client~=10 \
mariadb-client~=10.6 \ mariadb-connector-c~=3 \
mariadb-connector-c~=3.3 \ nomad~=1 \
rclone~=1.60 \ postgresql15-client~=15 \
redis~=7.0 \ rclone~=1.62 \
restic~=0.14 \ redis~=7 \
restic~=0.15 \
sqlite~=3 \ sqlite~=3 \
tzdata~=2023 \
; ;
ARG TARGETOS ARG TARGETOS
ARG TARGETARCH ARG TARGETARCH
COPY ./dist/resticscheduler-$TARGETOS-$TARGETARCH /bin/resticscheduler COPY ./dist/resticscheduler-$TARGETOS-$TARGETARCH /bin/resticscheduler
HEALTHCHECK CMD ["wget", "-O", "-", "http://localhost:8080/health"]
ENTRYPOINT [ "/bin/resticscheduler" ] ENTRYPOINT [ "/bin/resticscheduler" ]

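Note: the new HEALTHCHECK hits the health endpoint the scheduler serves on its default -addr of 0.0.0.0:8080. A minimal sketch of poking the same endpoint by hand (the container name here is only an example):

    # Query the /health endpoint the HEALTHCHECK uses, from inside a running container
    docker exec resticscheduler wget -q -O - http://localhost:8080/health

    # Docker aggregates the probe results into the container health state
    docker inspect --format '{{.State.Health.Status}}' resticscheduler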
View File

@@ -32,6 +32,10 @@ test:
 	go test -v -coverprofile=coverage.out # -short
 	go tool cover -func=coverage.out

+.PHONY: itest
+itest: docker-build
+	./itest/run.sh
+
 # Installs pre-commit hooks
 .PHONY: install-hooks
 install-hooks:

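With this Makefile change, the integration suite can be run locally; a sketch of the invocation, assuming Docker and docker-compose are installed:

    # Build the image and run the end-to-end backup/restore test
    make itest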
46
itest/bootstrap-tests.sh Executable file
View File

@@ -0,0 +1,46 @@
#! /bin/sh
set -ex
# Create flat file
echo "Hello" > /data/test.txt
# Create Sqlite database
touch /data/test_database.db
sqlite3 /data/test_database.db <<-EOF
CREATE TABLE test_table (
id INTEGER PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ("Test row");
EOF
# Create MySql database
until mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" --execute "SHOW DATABASES;"; do
sleep 1
done
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF
CREATE TABLE test_table (
id INTEGER AUTO_INCREMENT PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ("Test row");
EOF
# Create Postgres database
export PGPASSWORD="$PGSQL_PASS"
until psql --host "$PGSQL_HOST" --username "$PGSQL_USER" --command "SELECT datname FROM pg_database;"; do
sleep 1
done
psql -v ON_ERROR_STOP=1 --host "$PGSQL_HOST" --username "$PGSQL_USER" main <<EOF
CREATE TABLE test_table (
id SERIAL PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ('Test row');
EOF

57
itest/docker-compose.yml Normal file
View File

@@ -0,0 +1,57 @@
---
version: "3.9"

services:
  mysql:
    image: mysql
    environment:
      MYSQL_ROOT_PASSWORD: shhh
      MYSQL_DATABASE: main

  postgres:
    image: postgres
    environment:
      POSTGRES_PASSWORD: shhh
      POSTGRES_DB: main

  bootstrap:
    image: resticscheduler
    entrypoint: /bootstrap-tests.sh
    environment:
      MYSQL_HOST: mysql
      MYSQL_USER: root
      MYSQL_PWD: shhh
      PGSQL_HOST: postgres
      PGSQL_USER: postgres
      PGSQL_PASS: shhh
    volumes:
      - ./bootstrap-tests.sh:/bootstrap-tests.sh
      - ./data:/data

  main:
    image: resticscheduler
    environment:
      MYSQL_HOST: mysql
      MYSQL_USER: root
      MYSQL_PWD: shhh
      PGSQL_HOST: postgres
      PGSQL_USER: postgres
      PGSQL_PASS: shhh
    volumes:
      - ./repo:/repo
      - ./data:/data
      - ./test-backup.hcl:/test-backup.hcl

  validate:
    image: resticscheduler
    entrypoint: /validate-tests.sh
    environment:
      MYSQL_HOST: mysql
      MYSQL_USER: root
      MYSQL_PWD: shhh
      PGSQL_HOST: postgres
      PGSQL_USER: postgres
      PGSQL_PASS: shhh
    volumes:
      - ./validate-tests.sh:/validate-tests.sh
      - ./data:/data

35
itest/run.sh Executable file
View File

@@ -0,0 +1,35 @@
#! /bin/bash
set -ex
cd "$(dirname "$0")"
mkdir -p ./repo ./data
echo Clean everything
docker-compose down -v
rm -fr ./repo/* ./data/*
sleep 5
echo Boostrap databases and data
docker-compose up -d mysql postgres
docker-compose run bootstrap
sleep 1
echo Run backup job
docker-compose run main -backup IntegrationTest -once /test-backup.hcl
echo Clean data
docker-compose down -v
docker-compose up -d mysql postgres
rm -fr ./data/*
sleep 15
echo Run restore
docker-compose run main -restore IntegrationTest -once /test-backup.hcl
sleep 1
echo Validate data
docker-compose run validate
echo Clean all again
docker-compose down -v
rm -fr ./repo/* ./data/*

38
itest/test-backup.hcl Normal file
View File

@@ -0,0 +1,38 @@
job "IntegrationTest" {
  schedule = "@daily"

  config {
    repo = "/repo"
    passphrase = "shh"
  }

  mysql "MySQL" {
    hostname = env("MYSQL_HOST")
    database = "main"
    username = env("MYSQL_USER")
    password = env("MYSQL_PWD")
    dump_to = "/tmp/mysql.sql"
  }

  postgres "Postgres" {
    hostname = env("PGSQL_HOST")
    database = "main"
    username = env("PGSQL_USER")
    password = env("PGSQL_PASS")
    create = true
    dump_to = "/tmp/psql.sql"
  }

  sqlite "SQLite" {
    path = "/data/test_database.db"
    dump_to = "/data/test_database.db.bak"
  }

  backup {
    paths = ["/data"]

    restore_opts {
      Target = "/"
    }
  }
}

21
itest/validate-tests.sh Executable file
View File

@@ -0,0 +1,21 @@
#! /bin/sh
set -ex
# Check flat file
test -f /data/test.txt
grep "^Hello" /data/test.txt
# Check Sqlite database
test -f /data/test_database.db
sqlite3 /data/test_database.db "select data from test_table where id = 1" | grep "^Test row"
# Check MySql database
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF | grep "^Test row"
select data from test_table where id = 1;
EOF
# Check Postgres database
export PGPASSWORD="$PGSQL_PASS"
psql --host "$PGSQL_HOST" --user "$PGSQL_USER" main <<EOF | grep "Test row"
select data from test_table where id = 1;
EOF

29
job.go
View File

@@ -60,8 +60,9 @@ type Job struct {
     // Meta Tasks
     // NOTE: Now that these are also available within a task
     // these could be removed to make task order more obvious
     MySQL    []JobTaskMySQL    `hcl:"mysql,block"`
-    Sqlite   []JobTaskSqlite   `hcl:"sqlite,block"`
+    Postgres []JobTaskPostgres `hcl:"postgres,block"`
+    Sqlite   []JobTaskSqlite   `hcl:"sqlite,block"`

     // Metrics and health
     healthy bool
@@ -81,6 +82,12 @@ func (j Job) validateTasks() error {
         }
     }

+    for _, pg := range j.Postgres {
+        if err := pg.Validate(); err != nil {
+            return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
+        }
+    }
+
     for _, sqlite := range j.Sqlite {
         if err := sqlite.Validate(); err != nil {
             return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
@@ -126,6 +133,10 @@ func (j Job) AllTasks() []ExecutableTask {
         allTasks = append(allTasks, mysql.GetPreTask())
     }

+    for _, pg := range j.Postgres {
+        allTasks = append(allTasks, pg.GetPreTask())
+    }
+
     for _, sqlite := range j.Sqlite {
         allTasks = append(allTasks, sqlite.GetPreTask())
     }
@@ -146,6 +157,10 @@ func (j Job) AllTasks() []ExecutableTask {
         allTasks = append(allTasks, mysql.GetPostTask())
     }

+    for _, pg := range j.Postgres {
+        allTasks = append(allTasks, pg.GetPostTask())
+    }
+
     for _, sqlite := range j.Sqlite {
         allTasks = append(allTasks, sqlite.GetPostTask())
     }
@@ -160,6 +175,10 @@ func (j Job) BackupPaths() []string {
         paths = append(paths, t.DumpToPath)
     }

+    for _, t := range j.Postgres {
+        paths = append(paths, t.DumpToPath)
+    }
+
     for _, t := range j.Sqlite {
         paths = append(paths, t.DumpToPath)
     }
@@ -261,8 +280,10 @@ func (j Job) Run() {
             result.LastError = err
         } else {
             Metrics.SnapshotCurrentCount.WithLabelValues(j.Name).Set(float64(len(snapshots)))
-            latestSnapshot := snapshots[len(snapshots)-1]
-            Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix()))
+            if len(snapshots) > 0 {
+                latestSnapshot := snapshots[len(snapshots)-1]
+                Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix()))
+            }
         }

         if result.Success {

View File

@@ -92,6 +92,7 @@ func TestJobValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             },
             expectedErr: nil,
@@ -106,6 +107,7 @@ func TestJobValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             },
             expectedErr: main.ErrMissingField,
@@ -120,6 +122,7 @@ func TestJobValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             },
             expectedErr: main.ErrInvalidConfigValue,
@@ -134,6 +137,7 @@ func TestJobValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             },
             expectedErr: main.ErrMutuallyExclusive,
@@ -144,10 +148,13 @@ func TestJobValidation(t *testing.T) {
                 Name:     "Test job",
                 Schedule: "@daily",
                 Config:   ValidResticConfig(),
-                Tasks:    []main.JobTask{{}},
+                Tasks: []main.JobTask{
+                    {}, //nolint:exhaustruct
+                },
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             },
             expectedErr: main.ErrMissingField,
@@ -161,7 +168,10 @@ func TestJobValidation(t *testing.T) {
                 Tasks:    []main.JobTask{},
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
-                MySQL:    []main.JobTaskMySQL{{}},
+                MySQL: []main.JobTaskMySQL{
+                    {}, //nolint:exhaustruct
+                },
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             },
             expectedErr: main.ErrMissingField,
@@ -176,7 +186,10 @@ func TestJobValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
-                Sqlite:   []main.JobTaskSqlite{{}},
+                Postgres: []main.JobTaskPostgres{},
+                Sqlite: []main.JobTaskSqlite{
+                    {}, //nolint:exhaustruct
+                },
             },
             expectedErr: main.ErrMissingField,
         },
@@ -215,7 +228,9 @@ func TestConfigValidation(t *testing.T) {
                 Config:   ValidResticConfig(),
                 Tasks:    []main.JobTask{},
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
+                Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             }},
         },
@@ -231,7 +246,9 @@ func TestConfigValidation(t *testing.T) {
                 Config:   nil,
                 Tasks:    []main.JobTask{},
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
+                Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             }},
         },
@@ -257,6 +274,7 @@ func TestConfigValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
             }},
         },
@@ -274,8 +292,10 @@ func TestConfigValidation(t *testing.T) {
                 Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
                 Forget:   nil,
                 MySQL:    []main.JobTaskMySQL{},
+                Postgres: []main.JobTaskPostgres{},
                 Sqlite:   []main.JobTaskSqlite{},
-            }}},
+            }},
+            },
             expectedErr: main.ErrMissingField,
         },

50
main.go
View File

@@ -29,8 +29,12 @@ func ParseConfig(path string) ([]Job, error) {
         Functions: map[string]function.Function{
             "env": function.New(&function.Spec{
                 Params: []function.Parameter{{
-                    Name: "var",
-                    Type: cty.String,
+                    Name:             "var",
+                    Type:             cty.String,
+                    AllowNull:        false,
+                    AllowUnknown:     false,
+                    AllowDynamicType: false,
+                    AllowMarked:      false,
                 }},
                 VarParam: nil,
                 Type:     function.StaticReturnType(cty.String),
@@ -40,8 +44,12 @@ func ParseConfig(path string) ([]Job, error) {
             }),
             "readfile": function.New(&function.Spec{
                 Params: []function.Parameter{{
-                    Name: "path",
-                    Type: cty.String,
+                    Name:             "path",
+                    Type:             cty.String,
+                    AllowNull:        false,
+                    AllowUnknown:     false,
+                    AllowDynamicType: false,
+                    AllowMarked:      false,
                 }},
                 VarParam: nil,
                 Type:     function.StaticReturnType(cty.String),
@@ -181,10 +189,32 @@ func runRestoreJobs(jobs []Job, names string, snapshot string) error {
     return filterJobErr
 }

+func runUnlockJobs(jobs []Job, names string) error {
+    if names == "" {
+        return nil
+    }
+
+    namesSlice := strings.Split(names, ",")
+    if len(namesSlice) == 0 {
+        return nil
+    }
+
+    jobs, filterJobErr := FilterJobs(jobs, namesSlice)
+
+    for _, job := range jobs {
+        if err := job.NewRestic().Unlock(UnlockOpts{RemoveAll: true}); err != nil {
+            return err
+        }
+    }
+
+    return filterJobErr
+}
+
 type Flags struct {
     showVersion     bool
     backup          string
     restore         string
+    unlock          string
     restoreSnapshot string
     once            bool
     healthCheckAddr string
@@ -196,6 +226,7 @@ func readFlags() Flags {
     flag.BoolVar(&flags.showVersion, "version", false, "Display the version and exit")
     flag.StringVar(&flags.backup, "backup", "", "Run backup jobs now. Names are comma separated. `all` will run all.")
     flag.StringVar(&flags.restore, "restore", "", "Run restore jobs now. Names are comma separated. `all` will run all.")
+    flag.StringVar(&flags.unlock, "unlock", "", "Unlock job repos now. Names are comma separated. `all` will run all.")
     flag.BoolVar(&flags.once, "once", false, "Run jobs specified using -backup and -restore once and exit")
     flag.StringVar(&flags.healthCheckAddr, "addr", "0.0.0.0:8080", "address to bind health check API")
     flag.StringVar(&flags.metricsPushGateway, "push-gateway", "", "url of push gateway service for batch runs (optional)")
@@ -206,7 +237,12 @@ func readFlags() Flags {
     return flags
 }

-func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, snapshot string) error {
+func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot string) error {
+    // Run specified job unlocks
+    if err := runUnlockJobs(jobs, unlockJobs); err != nil {
+        return fmt.Errorf("Failed running unlock for jobs: %w", err)
+    }
+
     // Run specified backup jobs
     if err := runBackupJobs(jobs, backupJobs); err != nil {
         return fmt.Errorf("Failed running backup jobs: %w", err)
@@ -222,6 +258,8 @@ func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, snapshot string) error {
 func maybePushMetrics(metricsPushGateway string) error {
     if metricsPushGateway != "" {
+        fmt.Println("Pushing metrics to push gateway")
+
         if err := Metrics.PushToGateway(metricsPushGateway); err != nil {
             return fmt.Errorf("Failed pushing metrics after jobs run: %w", err)
         }
@@ -253,7 +291,7 @@ func main() {
         log.Fatalf("Failed to read jobs from files: %v", err)
     }

-    if err := runSpecifiedJobs(jobs, flags.backup, flags.restore, flags.restoreSnapshot); err != nil {
+    if err := runSpecifiedJobs(jobs, flags.backup, flags.restore, flags.unlock, flags.restoreSnapshot); err != nil {
         log.Fatal(err)
     }

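The new -unlock flag accepts the same comma-separated job names (or `all`) as -backup and -restore, and the added log line fires before metrics are pushed when -push-gateway is set. A hedged sketch of a one-shot run combining these flags (the config path and gateway URL are illustrative, not part of this change):

    # Drop any stale locks, run every backup job once, then push metrics before exiting
    resticscheduler \
      -unlock all \
      -backup all \
      -once \
      -push-gateway http://pushgateway:9091 \
      /etc/restic-scheduler/jobs.hcl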
View File

@@ -31,7 +31,6 @@ func TestReadJobs(t *testing.T) {
     t.Parallel()

     jobs, err := main.ReadJobs([]string{"./test/sample.hcl"})
     if err != nil {
-
         t.Errorf("Unexpected error reading jobs: %v", err)
     }
@@ -52,6 +51,7 @@ func TestRunJobs(t *testing.T) {
         Backup:   main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
         Forget:   nil,
         MySQL:    []main.JobTaskMySQL{},
+        Postgres: []main.JobTaskPostgres{},
         Sqlite:   []main.JobTaskSqlite{},
     }

View File

@@ -19,7 +19,6 @@ func (m ResticMetrics) PushToGateway(url string) error {
     err := push.New(url, "batch").
         Gatherer(m.Registry).
         Add()
-
     if err != nil {
         return fmt.Errorf("error pushing to registry %s: %w", url, err)
     }

View File

@@ -11,8 +11,10 @@ import (
     "time"
 )

-var ErrRestic = errors.New("restic error")
-var ErrRepoNotFound = errors.New("repository not found or uninitialized")
+var (
+    ErrRestic       = errors.New("restic error")
+    ErrRepoNotFound = errors.New("repository not found or uninitialized")
+)

 func lineIn(needle string, haystack []string) bool {
     for _, line := range haystack {
@@ -72,6 +74,16 @@ func (NoOpts) ToArgs() []string {
     return []string{}
 }

+type UnlockOpts struct {
+    RemoveAll bool `hcl:"RemoveAll,optional"`
+}
+
+func (uo UnlockOpts) ToArgs() (args []string) {
+    args = maybeAddArgBool(args, "--remove-all", uo.RemoveAll)
+
+    return
+}
+
 type BackupOpts struct {
     Exclude []string `hcl:"Exclude,optional"`
     Include []string `hcl:"Include,optional"`
@@ -193,9 +205,10 @@ type ResticGlobalOpts struct {
     TLSClientCertFile string            `hcl:"TlsClientCertFile,optional"`
     LimitDownload     int               `hcl:"LimitDownload,optional"`
     LimitUpload       int               `hcl:"LimitUpload,optional"`
-    Options           map[string]string `hcl:"Options,optional"`
     VerboseLevel      int               `hcl:"VerboseLevel,optional"`
+    Options           map[string]string `hcl:"Options,optional"`
     CleanupCache      bool              `hcl:"CleanupCache,optional"`
+    InsecureTLS       bool              `hcl:"InsecureTls,optional"`
     NoCache           bool              `hcl:"NoCache,optional"`
     NoLock            bool              `hcl:"NoLock,optional"`
 }
@@ -209,6 +222,7 @@ func (glo ResticGlobalOpts) ToArgs() (args []string) {
     args = maybeAddArgInt(args, "--limit-upload", glo.LimitUpload)
     args = maybeAddArgInt(args, "--verbose", glo.VerboseLevel)
     args = maybeAddArgBool(args, "--cleanup-cache", glo.CleanupCache)
+    args = maybeAddArgBool(args, "--insecure-tls", glo.InsecureTLS)
     args = maybeAddArgBool(args, "--no-cache", glo.NoCache)
     args = maybeAddArgBool(args, "--no-lock", glo.NoLock)
@@ -331,6 +345,12 @@ func (rcmd Restic) Check() error {
     return err
 }

+func (rcmd Restic) Unlock(unlockOpts UnlockOpts) error {
+    _, err := rcmd.RunRestic("unlock", unlockOpts)
+
+    return err
+}
+
 type Snapshot struct {
     UID int `json:"uid"`
     GID int `json:"gid"`

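For reference, the new options map onto plain restic flags; a sketch of the equivalent manual commands, assuming RESTIC_REPOSITORY and RESTIC_PASSWORD are already exported:

    # UnlockOpts{RemoveAll: true} becomes:
    restic unlock --remove-all

    # ResticGlobalOpts{InsecureTLS: true} adds a global flag to every invocation, e.g.:
    restic --insecure-tls snapshots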
View File

@@ -32,6 +32,7 @@ func TestGlobalOptions(t *testing.T) {
         LimitUpload:  1,
         VerboseLevel: 1,
         CleanupCache: true,
+        InsecureTLS:  true,
         NoCache:      true,
         NoLock:       true,
         Options: map[string]string{
@@ -48,6 +49,7 @@ func TestGlobalOptions(t *testing.T) {
         "--limit-upload", "1",
         "--verbose", "1",
         "--cleanup-cache",
+        "--insecure-tls",
         "--no-cache",
         "--no-lock",
         "--option", "key='a long value'",
@@ -150,6 +152,20 @@ func TestForgetOpts(t *testing.T) {
     AssertEqual(t, "args didn't match", expected, args)
 }

+func TestUnlockOpts(t *testing.T) {
+    t.Parallel()
+
+    args := main.UnlockOpts{
+        RemoveAll: true,
+    }.ToArgs()
+
+    expected := []string{
+        "--remove-all",
+    }
+
+    AssertEqual(t, "args didn't match", expected, args)
+}
+
 func TestBuildEnv(t *testing.T) {
     t.Parallel()
@@ -221,7 +237,7 @@ func TestResticInterface(t *testing.T) {
     }

     // Write test file to the data dir
-    err := os.WriteFile(dataFile, []byte("testing"), 0644)
+    err := os.WriteFile(dataFile, []byte("testing"), 0o644)
     AssertEqualFail(t, "unexpected error writing to test file", nil, err)

     // Make sure no existing repo is found
@@ -281,7 +297,7 @@ func TestResticInterface(t *testing.T) {
     AssertEqualFail(t, "unexpected error checking repo", nil, err)

     // Change the data file
-    err = os.WriteFile(dataFile, []byte("unexpected"), 0644)
+    err = os.WriteFile(dataFile, []byte("unexpected"), 0o644)
     AssertEqualFail(t, "unexpected error writing to test file", nil, err)

     // Check that data wrote
@@ -297,4 +313,8 @@ func TestResticInterface(t *testing.T) {
     value, err = os.ReadFile(restoredDataFile)
     AssertEqualFail(t, "unexpected error reading from test file", nil, err)
     AssertEqualFail(t, "incorrect value in test file", "testing", string(value))
+
+    // Try to unlock the repo (repo shouldn't really be locked, but this should still run without error
+    err = restic.Unlock(main.UnlockOpts{}) //nolint:exhaustruct
+    AssertEqualFail(t, "unexpected error unlocking repo", nil, err)
 }

View File

@@ -13,8 +13,10 @@ import (
     "github.com/robfig/cron/v3"
 )

-var jobResultsLock = sync.Mutex{}
-var jobResults = map[string]JobResult{}
+var (
+    jobResultsLock = sync.Mutex{}
+    jobResults     = map[string]JobResult{}
+)

 type JobResult struct {
     JobName string
@@ -113,6 +115,8 @@ func ScheduleAndRunJobs(jobs []Job) error {
     defer func() {
         ctx := scheduler.Stop()
         <-ctx.Done()
+
+        fmt.Println("All jobs successfully stopped")
     }()

     return nil

161
tasks.go
View File

@@ -67,7 +67,7 @@ func (t *JobTaskScript) SetName(name string) {
     t.name = name
 }

-// JobTaskMySQL is a sqlite backup task that performs required pre and post tasks.
+// JobTaskMySQL is a MySQL backup task that performs required pre and post tasks.
 type JobTaskMySQL struct {
     Port     int    `hcl:"port,optional"`
     Name     string `hcl:"name,label"`
@@ -160,12 +160,20 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
         command = append(command, "--host", t.Hostname)
     }

+    if t.Port != 0 {
+        command = append(command, "--port", fmt.Sprintf("%d", t.Port))
+    }
+
     if t.Username != "" {
         command = append(command, "--user", t.Username)
     }

     if t.Password != "" {
-        command = append(command, "--password", t.Password)
+        command = append(command, fmt.Sprintf("--password=%s", t.Password))
+    }
+
+    if t.Database != "" {
+        command = append(command, t.Database)
     }

     command = append(command, "<", t.DumpToPath)
@@ -179,6 +187,144 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
     }
 }

+// JobTaskPostgres is a postgres backup task that performs required pre and post tasks.
+type JobTaskPostgres struct {
+    Port          int      `hcl:"port,optional"`
+    Name          string   `hcl:"name,label"`
+    Hostname      string   `hcl:"hostname,optional"`
+    Database      string   `hcl:"database,optional"`
+    Username      string   `hcl:"username,optional"`
+    Password      string   `hcl:"password,optional"`
+    Tables        []string `hcl:"tables,optional"`
+    DumpToPath    string   `hcl:"dump_to"`
+    NoTablespaces bool     `hcl:"no_tablespaces,optional"`
+    Clean         bool     `hcl:"clean,optional"`
+    Create        bool     `hcl:"create,optional"`
+}
+
+func (t JobTaskPostgres) Paths() []string {
+    return []string{t.DumpToPath}
+}
+
+func (t JobTaskPostgres) Validate() error {
+    if t.DumpToPath == "" {
+        return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
+    }
+
+    if stat, err := os.Stat(t.DumpToPath); err != nil {
+        if !errors.Is(err, fs.ErrNotExist) {
+            return fmt.Errorf(
+                "task %s: invalid dump_to: could not stat path: %s: %w",
+                t.Name,
+                t.DumpToPath,
+                ErrInvalidConfigValue,
+            )
+        }
+    } else if stat.Mode().IsDir() {
+        return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
+    }
+
+    if len(t.Tables) > 0 && t.Database == "" {
+        return fmt.Errorf(
+            "task %s is invalid. Must specify a database to use tables: %w",
+            t.Name,
+            ErrMissingField,
+        )
+    }
+
+    return nil
+}
+
+//nolint:cyclop
+func (t JobTaskPostgres) GetPreTask() ExecutableTask {
+    command := []string{"pg_dump"}
+    if t.Database == "" {
+        command = []string{"pg_dumpall"}
+    }
+
+    command = append(command, "--file", t.DumpToPath)
+
+    if t.Hostname != "" {
+        command = append(command, "--host", t.Hostname)
+    }
+
+    if t.Port != 0 {
+        command = append(command, "--port", fmt.Sprintf("%d", t.Port))
+    }
+
+    if t.Username != "" {
+        command = append(command, "--username", t.Username)
+    }
+
+    if t.NoTablespaces {
+        command = append(command, "--no-tablespaces")
+    }
+
+    if t.Clean {
+        command = append(command, "--clean")
+    }
+
+    if t.Create {
+        command = append(command, "--create")
+    }
+
+    for _, table := range t.Tables {
+        command = append(command, "--table", table)
+    }
+
+    if t.Database != "" {
+        command = append(command, t.Database)
+    }
+
+    env := map[string]string{}
+    if t.Password != "" {
+        env["PGPASSWORD"] = t.Password
+    }
+
+    return JobTaskScript{
+        name:      t.Name,
+        env:       env,
+        Cwd:       ".",
+        OnBackup:  strings.Join(command, " "),
+        OnRestore: "",
+    }
+}
+
+func (t JobTaskPostgres) GetPostTask() ExecutableTask {
+    command := []string{"psql"}
+
+    if t.Hostname != "" {
+        command = append(command, "--host", t.Hostname)
+    }
+
+    if t.Port != 0 {
+        command = append(command, "--port", fmt.Sprintf("%d", t.Port))
+    }
+
+    if t.Username != "" {
+        command = append(command, "--username", t.Username)
+    }
+
+    if t.Database != "" {
+        command = append(command, t.Database)
+    }
+
+    command = append(command, "<", t.DumpToPath)
+
+    env := map[string]string{}
+    if t.Password != "" {
+        env["PGPASSWORD"] = t.Password
+    }
+
+    return JobTaskScript{
+        name:      t.Name,
+        env:       env,
+        Cwd:       ".",
+        OnBackup:  "",
+        OnRestore: strings.Join(command, " "),
+    }
+}
+
 // JobTaskSqlite is a sqlite backup task that performs required pre and post tasks.
 type JobTaskSqlite struct {
     Name string `hcl:"name,label"`
@@ -291,11 +437,12 @@ func (t *BackupFilesTask) Validate() error {

 // JobTask represents a single task within a backup job.
 type JobTask struct {
     Name        string            `hcl:"name,label"`
     PreScripts  []JobTaskScript   `hcl:"pre_script,block"`
     PostScripts []JobTaskScript   `hcl:"post_script,block"`
     MySQL       []JobTaskMySQL    `hcl:"mysql,block"`
+    Postgres    []JobTaskPostgres `hcl:"postgres,block"`
     Sqlite      []JobTaskSqlite   `hcl:"sqlite,block"`
 }

 func (t JobTask) Validate() error {

View File

@@ -163,7 +163,29 @@ func TestJobTaskSql(t *testing.T) {
                 " --user user --password=pass --no-tablespaces db table1 table2",
             postBackup:  "",
             preRestore:  "",
-            postRestore: "mysql --host host --user user --password pass < ./simple.sql",
+            postRestore: "mysql --host host --port 3306 --user user --password=pass db < ./simple.sql",
         },
+        {
+            name: "psql all",
+            task: main.JobTaskPostgres{
+                Name:          "simple",
+                Hostname:      "host",
+                Port:          6543,
+                Username:      "user",
+                Password:      "pass",
+                Database:      "db",
+                NoTablespaces: true,
+                Create:        true,
+                Clean:         true,
+                Tables:        []string{"table1", "table2"},
+                DumpToPath:    "./simple.sql",
+            },
+            validationErr: nil,
+            preBackup: "pg_dump --file ./simple.sql --host host --port 6543 --username user --no-tablespaces" +
+                " --clean --create --table table1 --table table2 db",
+            postBackup:  "",
+            preRestore:  "",
+            postRestore: "psql --host host --port 6543 --username user db < ./simple.sql",
+        },
         // Sqlite
         {