Compare commits


2 Commits

Author SHA1 Message Date
IamTheFij ecf375511c Pin Alpine version to 3.16
continuous-integration/drone/push Build is passing
Package versions always break in builds when a new Alpine version comes out. I'm going to start pinning these.
2022-11-28 07:09:14 -08:00
IamTheFij e2c71d8e25 Add support for prometheus push gateway on single runs
continuous-integration/drone/push Build is failing
2022-11-14 15:33:26 -08:00
23 changed files with 137 additions and 738 deletions


@ -4,7 +4,7 @@ name: test
steps:
- name: test
image: golang:1.21
image: golang:1.17
environment:
VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
commands:
@ -13,7 +13,7 @@ steps:
- make test
- name: check
image: iamthefij/drone-pre-commit@sha256:30fa17489b86d7a4c3ad9c3ce2e152c25d82b8671e5609d322c6cae0baed89cd
image: iamthefij/drone-pre-commit:personal
---
kind: pipeline
@ -32,7 +32,7 @@ trigger:
steps:
- name: build all binaries
image: golang:1.21
image: golang:1.17
environment:
VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
commands:

.gitignore (vendored): 5 changed lines

@ -20,9 +20,4 @@ dist/
# Built executable
restic-scheduler
resticscheduler
data/
# Itest temp dirs
itest/data
itest/repo


@ -16,6 +16,7 @@ linters:
- contextcheck
- cyclop
- decorder
- depguard
- dupl
- durationcheck
- errchkjson
@ -31,8 +32,10 @@ linters:
- gocognit
- goconst
- gocritic
# - gocyclo # Using cyclop
- godot
- gofumpt
# - goerr113 # Using errorlint
- gofmt
- goheader
- goimports
- gomnd
@ -42,6 +45,7 @@ linters:
- gosec
- grouper
- importas
# - ireturn
- lll
- maintidx
- makezero
@ -56,9 +60,11 @@ linters:
- paralleltest
- prealloc
- predeclared
# - promlinter # Not common enough
- revive
- rowserrcheck
- sqlclosecheck
# - stylecheck # Using revive
- tagliatelle
- tenv
- testpackage
@ -66,11 +72,27 @@ linters:
- tparallel
- unconvert
- unparam
- varnamelen
- wastedassign
- whitespace
- wrapcheck
- wsl
disable:
- gochecknoglobals
- godox
- forbidigo
# Deprecated
- golint
- interfacer
- maligned
- scopelint
- ifshort
- varcheck
- structcheck
- deadcode
- exhaustivestruct
linters-settings:
gomnd:
settings:


@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v3.4.0
hooks:
- id: check-added-large-files
- id: check-yaml
@ -11,8 +11,10 @@ repos:
- id: end-of-file-fixer
- id: check-merge-conflict
- repo: https://github.com/dnephin/pre-commit-golang
rev: v0.5.1
rev: v0.4.0
hooks:
- id: go-fmt
- id: go-imports
- id: golangci-lint
args:
- --timeout=3m


@ -1,23 +1,18 @@
FROM alpine:3.18
FROM alpine:3.16
RUN apk add --no-cache \
bash~=5 \
consul~=1 \
mariadb-client~=10 \
mariadb-connector-c~=3 \
nomad~=1 \
postgresql15-client~=15 \
rclone~=1.62 \
redis~=7 \
restic~=0.15 \
consul~=1.12 \
mariadb-client~=10.6 \
mariadb-connector-c~=3.1 \
rclone~=1.58 \
redis~=7.0 \
restic~=0.13 \
sqlite~=3 \
tzdata~=2023 \
;
ARG TARGETOS
ARG TARGETARCH
COPY ./dist/resticscheduler-$TARGETOS-$TARGETARCH /bin/resticscheduler
HEALTHCHECK CMD ["wget", "-O", "-", "http://localhost:8080/health"]
ENTRYPOINT [ "/bin/resticscheduler" ]


@ -2,7 +2,7 @@ APP_NAME = resticscheduler
VERSION ?= $(shell git describe --tags --dirty)
GOFILES = *.go
# Multi-arch targets are generated from this
TARGET_ALIAS = $(APP_NAME)-linux-amd64 $(APP_NAME)-linux-arm $(APP_NAME)-linux-arm64
TARGET_ALIAS = $(APP_NAME)-linux-amd64 $(APP_NAME)-linux-arm $(APP_NAME)-linux-arm64 $(APP_NAME)-darwin-amd64 $(APP_NAME)-darwin-arm64
TARGETS = $(addprefix dist/,$(TARGET_ALIAS))
.QUOTE = "
CURRENT_GOARCH = $(shell go env | awk -F "=" '/GOARCH/ { gsub(/$(.QUOTE)/,"", $$2); print $$2}')
@ -29,13 +29,9 @@ build: $(APP_NAME)
# Run all tests
.PHONY: test
test:
go test -v -coverprofile=coverage.out # -short
go test -coverprofile=coverage.out # -short
go tool cover -func=coverage.out
.PHONY: itest
itest: docker-build
./itest/run.sh
# Installs pre-commit hooks
.PHONY: install-hooks
install-hooks:

go.mod: 2 changed lines

@ -1,6 +1,6 @@
module git.iamthefij.com/iamthefij/restic-scheduler
go 1.20
go 1.17
require (
github.com/go-test/deep v1.0.8


@ -1,46 +0,0 @@
#! /bin/sh
set -ex
# Create flat file
echo "Hello" > /data/test.txt
# Create Sqlite database
touch /data/test_database.db
sqlite3 /data/test_database.db <<-EOF
CREATE TABLE test_table (
id INTEGER PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ("Test row");
EOF
# Create MySql database
until mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" --execute "SHOW DATABASES;"; do
sleep 1
done
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF
CREATE TABLE test_table (
id INTEGER AUTO_INCREMENT PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ("Test row");
EOF
# Create Postgres database
export PGPASSWORD="$PGSQL_PASS"
until psql --host "$PGSQL_HOST" --username "$PGSQL_USER" --command "SELECT datname FROM pg_database;"; do
sleep 1
done
psql -v ON_ERROR_STOP=1 --host "$PGSQL_HOST" --username "$PGSQL_USER" main <<EOF
CREATE TABLE test_table (
id SERIAL PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ('Test row');
EOF


@ -1,57 +0,0 @@
---
version: "3.9"
services:
mysql:
image: mysql
environment:
MYSQL_ROOT_PASSWORD: shhh
MYSQL_DATABASE: main
postgres:
image: postgres
environment:
POSTGRES_PASSWORD: shhh
POSTGRES_DB: main
bootstrap:
image: resticscheduler
entrypoint: /bootstrap-tests.sh
environment:
MYSQL_HOST: mysql
MYSQL_USER: root
MYSQL_PWD: shhh
PGSQL_HOST: postgres
PGSQL_USER: postgres
PGSQL_PASS: shhh
volumes:
- ./bootstrap-tests.sh:/bootstrap-tests.sh
- ./data:/data
main:
image: resticscheduler
environment:
MYSQL_HOST: mysql
MYSQL_USER: root
MYSQL_PWD: shhh
PGSQL_HOST: postgres
PGSQL_USER: postgres
PGSQL_PASS: shhh
volumes:
- ./repo:/repo
- ./data:/data
- ./test-backup.hcl:/test-backup.hcl
validate:
image: resticscheduler
entrypoint: /validate-tests.sh
environment:
MYSQL_HOST: mysql
MYSQL_USER: root
MYSQL_PWD: shhh
PGSQL_HOST: postgres
PGSQL_USER: postgres
PGSQL_PASS: shhh
volumes:
- ./validate-tests.sh:/validate-tests.sh
- ./data:/data


@ -1,35 +0,0 @@
#! /bin/bash
set -ex
cd "$(dirname "$0")"
mkdir -p ./repo ./data
echo Clean everything
docker-compose down -v
rm -fr ./repo/* ./data/*
sleep 5
echo Boostrap databases and data
docker-compose up -d mysql postgres
docker-compose run bootstrap
sleep 1
echo Run backup job
docker-compose run main -backup IntegrationTest -once /test-backup.hcl
echo Clean data
docker-compose down -v
docker-compose up -d mysql postgres
rm -fr ./data/*
sleep 15
echo Run restore
docker-compose run main -restore IntegrationTest -once /test-backup.hcl
sleep 1
echo Validate data
docker-compose run validate
echo Clean all again
docker-compose down -v
rm -fr ./repo/* ./data/*


@ -1,38 +0,0 @@
job "IntegrationTest" {
schedule = "@daily"
config {
repo = "/repo"
passphrase = "shh"
}
mysql "MySQL" {
hostname = env("MYSQL_HOST")
database = "main"
username = env("MYSQL_USER")
password = env("MYSQL_PWD")
dump_to = "/tmp/mysql.sql"
}
postgres "Postgres" {
hostname = env("PGSQL_HOST")
database = "main"
username = env("PGSQL_USER")
password = env("PGSQL_PASS")
create = true
dump_to = "/tmp/psql.sql"
}
sqlite "SQLite" {
path = "/data/test_database.db"
dump_to = "/data/test_database.db.bak"
}
backup {
paths = ["/data"]
restore_opts {
Target = "/"
}
}
}


@ -1,21 +0,0 @@
#! /bin/sh
set -ex
# Check flat file
test -f /data/test.txt
grep "^Hello" /data/test.txt
# Check Sqlite database
test -f /data/test_database.db
sqlite3 /data/test_database.db "select data from test_table where id = 1" | grep "^Test row"
# Check MySql database
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF | grep "^Test row"
select data from test_table where id = 1;
EOF
# Check Postgres database
export PGPASSWORD="$PGSQL_PASS"
psql --host "$PGSQL_HOST" --user "$PGSQL_USER" main <<EOF | grep "Test row"
select data from test_table where id = 1;
EOF

job.go: 79 changed lines

@ -52,17 +52,14 @@ func (r ResticConfig) Validate() error {
type Job struct {
Name string `hcl:"name,label"`
Schedule string `hcl:"schedule"`
Config *ResticConfig `hcl:"config,block"`
Config ResticConfig `hcl:"config,block"`
Tasks []JobTask `hcl:"task,block"`
Backup BackupFilesTask `hcl:"backup,block"`
Forget *ForgetOpts `hcl:"forget,block"`
// Meta Tasks
// NOTE: Now that these are also available within a task
// these could be removed to make task order more obvious
MySQL []JobTaskMySQL `hcl:"mysql,block"`
Postgres []JobTaskPostgres `hcl:"postgres,block"`
Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
MySQL []JobTaskMySQL `hcl:"mysql,block"`
Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
// Metrics and health
healthy bool
@ -76,24 +73,6 @@ func (j Job) validateTasks() error {
}
}
for _, mysql := range j.MySQL {
if err := mysql.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
for _, pg := range j.Postgres {
if err := pg.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
for _, sqlite := range j.Sqlite {
if err := sqlite.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
return nil
}
@ -103,11 +82,7 @@ func (j Job) Validate() error {
}
if _, err := cron.ParseStandard(j.Schedule); err != nil {
return fmt.Errorf("job %s has an invalid schedule: %w: %w", j.Name, err, ErrInvalidConfigValue)
}
if j.Config == nil {
return fmt.Errorf("job %s is missing restic config: %w", j.Name, ErrMissingField)
return fmt.Errorf("job %s has an invalid schedule: %v: %w", j.Name, err, ErrInvalidConfigValue)
}
if err := j.Config.Validate(); err != nil {
@ -118,6 +93,18 @@ func (j Job) Validate() error {
return err
}
for _, mysql := range j.MySQL {
if err := mysql.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
for _, sqlite := range j.Sqlite {
if err := sqlite.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
if err := j.Backup.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid backup config: %w", j.Name, err)
}
@ -133,10 +120,6 @@ func (j Job) AllTasks() []ExecutableTask {
allTasks = append(allTasks, mysql.GetPreTask())
}
for _, pg := range j.Postgres {
allTasks = append(allTasks, pg.GetPreTask())
}
for _, sqlite := range j.Sqlite {
allTasks = append(allTasks, sqlite.GetPreTask())
}
@ -157,10 +140,6 @@ func (j Job) AllTasks() []ExecutableTask {
allTasks = append(allTasks, mysql.GetPostTask())
}
for _, pg := range j.Postgres {
allTasks = append(allTasks, pg.GetPostTask())
}
for _, sqlite := range j.Sqlite {
allTasks = append(allTasks, sqlite.GetPostTask())
}
@ -175,10 +154,6 @@ func (j Job) BackupPaths() []string {
paths = append(paths, t.DumpToPath)
}
for _, t := range j.Postgres {
paths = append(paths, t.DumpToPath)
}
for _, t := range j.Sqlite {
paths = append(paths, t.DumpToPath)
}
@ -222,7 +197,7 @@ func (j Job) Logger() *log.Logger {
return GetLogger(j.Name)
}
func (j Job) RunRestore(snapshot string) error {
func (j Job) RunRestore() error {
logger := j.Logger()
restic := j.NewRestic()
@ -238,10 +213,6 @@ func (j Job) RunRestore(snapshot string) error {
Env: nil,
}
if backupTask, ok := exTask.(BackupFilesTask); ok {
backupTask.snapshot = snapshot
}
if err := exTask.RunRestore(taskCfg); err != nil {
return fmt.Errorf("failed running job %s: %w", j.Name, err)
}
@ -280,10 +251,8 @@ func (j Job) Run() {
result.LastError = err
} else {
Metrics.SnapshotCurrentCount.WithLabelValues(j.Name).Set(float64(len(snapshots)))
if len(snapshots) > 0 {
latestSnapshot := snapshots[len(snapshots)-1]
Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix()))
}
latestSnapshot := snapshots[len(snapshots)-1]
Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix()))
}
if result.Success {
@ -307,8 +276,8 @@ func (j Job) NewRestic() *Restic {
}
type Config struct {
DefaultConfig *ResticConfig `hcl:"default_config,block"`
Jobs []Job `hcl:"job,block"`
// GlobalConfig *ResticConfig `hcl:"global_config,block"`
Jobs []Job `hcl:"job,block"`
}
func (c Config) Validate() error {
@ -317,12 +286,6 @@ func (c Config) Validate() error {
}
for _, job := range c.Jobs {
// Use default restic config if no job config is provided
// TODO: Maybe merge values here
if job.Config == nil {
job.Config = c.DefaultConfig
}
if err := job.Validate(); err != nil {
return err
}


@ -7,8 +7,8 @@ import (
main "git.iamthefij.com/iamthefij/restic-scheduler"
)
func ValidResticConfig() *main.ResticConfig {
return &main.ResticConfig{
func ValidResticConfig() main.ResticConfig {
return main.ResticConfig{
Passphrase: "shh",
Repo: "./data",
Env: nil,
@ -92,7 +92,6 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
},
expectedErr: nil,
@ -107,7 +106,6 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
},
expectedErr: main.ErrMissingField,
@ -122,7 +120,6 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
},
expectedErr: main.ErrInvalidConfigValue,
@ -132,12 +129,11 @@ func TestJobValidation(t *testing.T) {
job: main.Job{
Name: "Test job",
Schedule: "@daily",
Config: &main.ResticConfig{}, //nolint:exhaustruct
Config: main.ResticConfig{}, //nolint:exhaustruct
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
},
expectedErr: main.ErrMutuallyExclusive,
@ -148,13 +144,10 @@ func TestJobValidation(t *testing.T) {
Name: "Test job",
Schedule: "@daily",
Config: ValidResticConfig(),
Tasks: []main.JobTask{
{}, //nolint:exhaustruct
},
Tasks: []main.JobTask{{}},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
},
expectedErr: main.ErrMissingField,
@ -168,10 +161,7 @@ func TestJobValidation(t *testing.T) {
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{
{}, //nolint:exhaustruct
},
Postgres: []main.JobTaskPostgres{},
MySQL: []main.JobTaskMySQL{{}},
Sqlite: []main.JobTaskSqlite{},
},
expectedErr: main.ErrMissingField,
@ -186,10 +176,7 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{
{}, //nolint:exhaustruct
},
Sqlite: []main.JobTaskSqlite{{}},
},
expectedErr: main.ErrMissingField,
},
@ -220,82 +207,34 @@ func TestConfigValidation(t *testing.T) {
}{
{
name: "Valid job",
config: main.Config{
DefaultConfig: nil,
Jobs: []main.Job{{
Name: "Valid job",
Schedule: "@daily",
Config: ValidResticConfig(),
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}},
},
config: main.Config{Jobs: []main.Job{{
Name: "Valid job",
Schedule: "@daily",
Config: ValidResticConfig(),
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
MySQL: []main.JobTaskMySQL{},
Sqlite: []main.JobTaskSqlite{},
}}},
expectedErr: nil,
},
{
name: "Valid job with default config",
config: main.Config{
DefaultConfig: ValidResticConfig(),
Jobs: []main.Job{{
Name: "Valid job",
Schedule: "@daily",
Config: nil,
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}},
},
expectedErr: nil,
},
{
name: "No jobs",
config: main.Config{
DefaultConfig: nil,
Jobs: []main.Job{},
},
name: "No jobs",
config: main.Config{Jobs: []main.Job{}},
expectedErr: main.ErrNoJobsFound,
},
{
name: "Invalid name",
config: main.Config{
DefaultConfig: nil,
Jobs: []main.Job{{
Name: "",
Schedule: "@daily",
Config: ValidResticConfig(),
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}},
},
expectedErr: main.ErrMissingField,
},
{
name: "Missing config",
config: main.Config{
DefaultConfig: nil,
Jobs: []main.Job{{
Name: "",
Schedule: "@daily",
Config: nil,
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}},
},
config: main.Config{Jobs: []main.Job{{
Name: "",
Schedule: "@daily",
Config: ValidResticConfig(),
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Sqlite: []main.JobTaskSqlite{},
}}},
expectedErr: main.ErrMissingField,
},
}

main.go: 66 changed lines

@ -29,12 +29,8 @@ func ParseConfig(path string) ([]Job, error) {
Functions: map[string]function.Function{
"env": function.New(&function.Spec{
Params: []function.Parameter{{
Name: "var",
Type: cty.String,
AllowNull: false,
AllowUnknown: false,
AllowDynamicType: false,
AllowMarked: false,
Name: "var",
Type: cty.String,
}},
VarParam: nil,
Type: function.StaticReturnType(cty.String),
@ -44,12 +40,8 @@ func ParseConfig(path string) ([]Job, error) {
}),
"readfile": function.New(&function.Spec{
Params: []function.Parameter{{
Name: "path",
Type: cty.String,
AllowNull: false,
AllowUnknown: false,
AllowDynamicType: false,
AllowMarked: false,
Name: "path",
Type: cty.String,
}},
VarParam: nil,
Type: function.StaticReturnType(cty.String),
@ -148,10 +140,6 @@ func FilterJobs(jobs []Job, names []string) ([]Job, error) {
}
func runBackupJobs(jobs []Job, names string) error {
if names == "" {
return nil
}
namesSlice := strings.Split(names, ",")
if len(namesSlice) == 0 {
@ -168,11 +156,7 @@ func runBackupJobs(jobs []Job, names string) error {
return filterJobErr
}
func runRestoreJobs(jobs []Job, names string, snapshot string) error {
if names == "" {
return nil
}
func runRestoreJobs(jobs []Job, names string) error {
namesSlice := strings.Split(names, ",")
if len(namesSlice) == 0 {
@ -181,28 +165,7 @@ func runRestoreJobs(jobs []Job, names string, snapshot string) error {
jobs, filterJobErr := FilterJobs(jobs, namesSlice)
for _, job := range jobs {
if err := job.RunRestore(snapshot); err != nil {
return err
}
}
return filterJobErr
}
func runUnlockJobs(jobs []Job, names string) error {
if names == "" {
return nil
}
namesSlice := strings.Split(names, ",")
if len(namesSlice) == 0 {
return nil
}
jobs, filterJobErr := FilterJobs(jobs, namesSlice)
for _, job := range jobs {
if err := job.NewRestic().Unlock(UnlockOpts{RemoveAll: true}); err != nil {
if err := job.RunRestore(); err != nil {
return err
}
}
@ -214,8 +177,6 @@ type Flags struct {
showVersion bool
backup string
restore string
unlock string
restoreSnapshot string
once bool
healthCheckAddr string
metricsPushGateway string
@ -226,30 +187,23 @@ func readFlags() Flags {
flag.BoolVar(&flags.showVersion, "version", false, "Display the version and exit")
flag.StringVar(&flags.backup, "backup", "", "Run backup jobs now. Names are comma separated. `all` will run all.")
flag.StringVar(&flags.restore, "restore", "", "Run restore jobs now. Names are comma separated. `all` will run all.")
flag.StringVar(&flags.unlock, "unlock", "", "Unlock job repos now. Names are comma separated. `all` will run all.")
flag.BoolVar(&flags.once, "once", false, "Run jobs specified using -backup and -restore once and exit")
flag.StringVar(&flags.healthCheckAddr, "addr", "0.0.0.0:8080", "address to bind health check API")
flag.StringVar(&flags.metricsPushGateway, "push-gateway", "", "url of push gateway service for batch runs (optional)")
flag.StringVar(&JobBaseDir, "base-dir", JobBaseDir, "Base dir to create intermediate job files like SQL dumps.")
flag.StringVar(&flags.restoreSnapshot, "snapshot", "latest", "the snapshot to restore")
flag.Parse()
return flags
}
func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot string) error {
// Run specified job unlocks
if err := runUnlockJobs(jobs, unlockJobs); err != nil {
return fmt.Errorf("Failed running unlock for jobs: %w", err)
}
func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs string) error {
// Run specified backup jobs
if err := runBackupJobs(jobs, backupJobs); err != nil {
return fmt.Errorf("Failed running backup jobs: %w", err)
}
// Run specified restore jobs
if err := runRestoreJobs(jobs, restoreJobs, snapshot); err != nil {
if err := runRestoreJobs(jobs, restoreJobs); err != nil {
return fmt.Errorf("Failed running restore jobs: %w", err)
}
@ -258,8 +212,6 @@ func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot
func maybePushMetrics(metricsPushGateway string) error {
if metricsPushGateway != "" {
fmt.Println("Pushing metrics to push gateway")
if err := Metrics.PushToGateway(metricsPushGateway); err != nil {
return fmt.Errorf("Failed pushing metrics after jobs run: %w", err)
}
@ -291,7 +243,7 @@ func main() {
log.Fatalf("Failed to read jobs from files: %v", err)
}
if err := runSpecifiedJobs(jobs, flags.backup, flags.restore, flags.unlock, flags.restoreSnapshot); err != nil {
if err := runSpecifiedJobs(jobs, flags.backup, flags.restore); err != nil {
log.Fatal(err)
}


@ -18,9 +18,9 @@ func TestMain(m *testing.M) {
if testResult == 0 && testing.CoverMode() != "" {
c := testing.Coverage()
if c < MinCoverage {
fmt.Printf("WARNING: Tests passed but coverage failed at %0.2f and minimum to pass is %0.2f\n", c, MinCoverage)
fmt.Printf("Tests passed but coverage failed at %0.2f and minimum to pass is %0.2f\n", c, MinCoverage)
testResult = 0
testResult = -1
}
}
@ -31,6 +31,7 @@ func TestReadJobs(t *testing.T) {
t.Parallel()
jobs, err := main.ReadJobs([]string{"./test/sample.hcl"})
if err != nil {
t.Errorf("Unexpected error reading jobs: %v", err)
}
@ -51,7 +52,6 @@ func TestRunJobs(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}


@ -19,6 +19,7 @@ func (m ResticMetrics) PushToGateway(url string) error {
err := push.New(url, "batch").
Gatherer(m.Registry).
Add()
if err != nil {
return fmt.Errorf("error pushing to registry %s: %w", url, err)
}
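The PushToGateway method above is what backs the -push-gateway flag: Add() POSTs the gathered metrics for the "batch" job group, so a one-shot -backup or -restore run can report metrics without keeping a long-lived /metrics endpoint alive. A minimal standalone sketch of the same pattern follows; the registry, counter name, and gateway URL are illustrative and not taken from this repository.

package main

import (
	"log"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/push"
)

func main() {
	// Use a dedicated registry rather than the global default registry.
	registry := prometheus.NewRegistry()
	runs := prometheus.NewCounter(prometheus.CounterOpts{
		Name: "example_batch_runs_total",
		Help: "Number of completed batch runs.",
	})
	registry.MustRegister(runs)
	runs.Inc()

	// Add() POSTs metrics for the "batch" job group; unlike Push(), it does
	// not replace metrics already held for that group on the gateway.
	if err := push.New("http://localhost:9091", "batch").
		Gatherer(registry).
		Add(); err != nil {
		log.Fatalf("could not push to gateway: %v", err)
	}
}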


@ -11,10 +11,8 @@ import (
"time"
)
var (
ErrRestic = errors.New("restic error")
ErrRepoNotFound = errors.New("repository not found or uninitialized")
)
var ErrRestic = errors.New("restic error")
var ErrRepoNotFound = errors.New("repository not found or uninitialized")
func lineIn(needle string, haystack []string) bool {
for _, line := range haystack {
@ -74,16 +72,6 @@ func (NoOpts) ToArgs() []string {
return []string{}
}
type UnlockOpts struct {
RemoveAll bool `hcl:"RemoveAll,optional"`
}
func (uo UnlockOpts) ToArgs() (args []string) {
args = maybeAddArgBool(args, "--remove-all", uo.RemoveAll)
return
}
type BackupOpts struct {
Exclude []string `hcl:"Exclude,optional"`
Include []string `hcl:"Include,optional"`
@ -205,10 +193,9 @@ type ResticGlobalOpts struct {
TLSClientCertFile string `hcl:"TlsClientCertFile,optional"`
LimitDownload int `hcl:"LimitDownload,optional"`
LimitUpload int `hcl:"LimitUpload,optional"`
VerboseLevel int `hcl:"VerboseLevel,optional"`
Options map[string]string `hcl:"Options,optional"`
VerboseLevel int `hcl:"VerboseLevel,optional"`
CleanupCache bool `hcl:"CleanupCache,optional"`
InsecureTLS bool `hcl:"InsecureTls,optional"`
NoCache bool `hcl:"NoCache,optional"`
NoLock bool `hcl:"NoLock,optional"`
}
@ -222,7 +209,6 @@ func (glo ResticGlobalOpts) ToArgs() (args []string) {
args = maybeAddArgInt(args, "--limit-upload", glo.LimitUpload)
args = maybeAddArgInt(args, "--verbose", glo.VerboseLevel)
args = maybeAddArgBool(args, "--cleanup-cache", glo.CleanupCache)
args = maybeAddArgBool(args, "--insecure-tls", glo.InsecureTLS)
args = maybeAddArgBool(args, "--no-cache", glo.NoCache)
args = maybeAddArgBool(args, "--no-lock", glo.NoLock)
@ -287,11 +273,7 @@ func (e *ResticError) Unwrap() error {
return e.OriginalError
}
func (rcmd Restic) RunRestic(
command string,
options CommandOptions,
commandArgs ...string,
) (*CapturedCommandLogWriter, error) {
func (rcmd Restic) RunRestic(command string, options CommandOptions, commandArgs ...string) ([]string, error) {
args := []string{}
if rcmd.GlobalOpts != nil {
args = rcmd.GlobalOpts.ToArgs()
@ -303,22 +285,22 @@ func (rcmd Restic) RunRestic(
cmd := exec.Command("restic", args...)
output := NewCapturedCommandLogWriter(rcmd.Logger)
cmd.Stdout = output.Stdout
cmd.Stderr = output.Stderr
output := NewCapturedLogWriter(rcmd.Logger)
cmd.Stdout = output
cmd.Stderr = output
cmd.Env = rcmd.BuildEnv()
cmd.Dir = rcmd.Cwd
if err := cmd.Run(); err != nil {
responseErr := ErrRestic
if lineIn("Is there a repository at the following location?", output.Stderr.Lines) {
if lineIn("Is there a repository at the following location?", output.Lines) {
responseErr = ErrRepoNotFound
}
return output, NewResticError(command, output.AllLines(), responseErr)
return output.Lines, NewResticError(command, output.Lines, responseErr)
}
return output, nil
return output.Lines, nil
}
func (rcmd Restic) Backup(files []string, opts BackupOpts) error {
@ -345,12 +327,6 @@ func (rcmd Restic) Check() error {
return err
}
func (rcmd Restic) Unlock(unlockOpts UnlockOpts) error {
_, err := rcmd.RunRestic("unlock", unlockOpts)
return err
}
type Snapshot struct {
UID int `json:"uid"`
GID int `json:"gid"`
@ -365,20 +341,15 @@ type Snapshot struct {
}
func (rcmd Restic) ReadSnapshots() ([]Snapshot, error) {
output, err := rcmd.RunRestic("snapshots", GenericOpts{"--json"})
lines, err := rcmd.RunRestic("snapshots", GenericOpts{"--json"})
if err != nil {
return nil, err
}
if len(output.Stdout.Lines) == 0 {
return nil, fmt.Errorf("no snapshot output to parse: %w", ErrRestic)
}
singleLineOutput := strings.Join(output.Stdout.Lines, "")
snapshots := new([]Snapshot)
if err = json.Unmarshal([]byte(singleLineOutput), snapshots); err != nil {
return nil, fmt.Errorf("failed parsing snapshot results from %s: %w", singleLineOutput, err)
if err = json.Unmarshal([]byte(lines[0]), snapshots); err != nil {
return nil, fmt.Errorf("failed parsing snapshot results from %s: %w", lines[0], err)
}
return *snapshots, nil
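One side of this hunk joins every captured stdout line before unmarshalling, while the other parses only lines[0]; joining is the safer reading because the JSON emitted by restic snapshots --json can arrive split across several captured lines. A small self-contained sketch of the join-then-parse approach, with an illustrative two-line sample and a trimmed-down stand-in for the Snapshot struct:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// snapshot is a trimmed-down stand-in for the Snapshot type in restic.go.
type snapshot struct {
	ID    string   `json:"id"`
	Paths []string `json:"paths"`
}

func main() {
	// Captured output arrives line by line; the JSON document may span lines,
	// so join everything before parsing instead of relying on lines[0].
	lines := []string{
		`[{"id":"abc123","paths":`,
		`["/data"]}]`,
	}

	var snapshots []snapshot
	if err := json.Unmarshal([]byte(strings.Join(lines, "")), &snapshots); err != nil {
		panic(err)
	}

	fmt.Println(snapshots[0].ID, snapshots[0].Paths)
}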


@ -32,7 +32,6 @@ func TestGlobalOptions(t *testing.T) {
LimitUpload: 1,
VerboseLevel: 1,
CleanupCache: true,
InsecureTLS: true,
NoCache: true,
NoLock: true,
Options: map[string]string{
@ -49,7 +48,6 @@ func TestGlobalOptions(t *testing.T) {
"--limit-upload", "1",
"--verbose", "1",
"--cleanup-cache",
"--insecure-tls",
"--no-cache",
"--no-lock",
"--option", "key='a long value'",
@ -152,20 +150,6 @@ func TestForgetOpts(t *testing.T) {
AssertEqual(t, "args didn't match", expected, args)
}
func TestUnlockOpts(t *testing.T) {
t.Parallel()
args := main.UnlockOpts{
RemoveAll: true,
}.ToArgs()
expected := []string{
"--remove-all",
}
AssertEqual(t, "args didn't match", expected, args)
}
func TestBuildEnv(t *testing.T) {
t.Parallel()
@ -237,7 +221,7 @@ func TestResticInterface(t *testing.T) {
}
// Write test file to the data dir
err := os.WriteFile(dataFile, []byte("testing"), 0o644)
err := os.WriteFile(dataFile, []byte("testing"), 0644)
AssertEqualFail(t, "unexpected error writing to test file", nil, err)
// Make sure no existing repo is found
@ -297,7 +281,7 @@ func TestResticInterface(t *testing.T) {
AssertEqualFail(t, "unexpected error checking repo", nil, err)
// Change the data file
err = os.WriteFile(dataFile, []byte("unexpected"), 0o644)
err = os.WriteFile(dataFile, []byte("unexpected"), 0644)
AssertEqualFail(t, "unexpected error writing to test file", nil, err)
// Check that data wrote
@ -313,8 +297,4 @@ func TestResticInterface(t *testing.T) {
value, err = os.ReadFile(restoredDataFile)
AssertEqualFail(t, "unexpected error reading from test file", nil, err)
AssertEqualFail(t, "incorrect value in test file", "testing", string(value))
// Try to unlock the repo (repo shouldn't really be locked, but this should still run without error
err = restic.Unlock(main.UnlockOpts{}) //nolint:exhaustruct
AssertEqualFail(t, "unexpected error unlocking repo", nil, err)
}


@ -13,10 +13,8 @@ import (
"github.com/robfig/cron/v3"
)
var (
jobResultsLock = sync.Mutex{}
jobResults = map[string]JobResult{}
)
var jobResultsLock = sync.Mutex{}
var jobResults = map[string]JobResult{}
type JobResult struct {
JobName string
@ -69,12 +67,9 @@ func healthHandleFunc(writer http.ResponseWriter, request *http.Request) {
func RunHTTPHandlers(addr string) error {
http.HandleFunc("/health", healthHandleFunc)
http.Handle("/metrics", promhttp.HandlerFor(
Metrics.Registry,
promhttp.HandlerOpts{Registry: Metrics.Registry}, //nolint:exhaustruct
))
http.Handle("/metrics", promhttp.Handler())
return fmt.Errorf("error on http server: %w", http.ListenAndServe(addr, nil)) //#nosec: g114
return fmt.Errorf("error on healthcheck: %w", http.ListenAndServe(addr, nil)) //#nosec: g114
}
func ScheduleAndRunJobs(jobs []Job) error {
@ -115,8 +110,6 @@ func ScheduleAndRunJobs(jobs []Job) error {
defer func() {
ctx := scheduler.Stop()
<-ctx.Done()
fmt.Println("All jobs successfully stopped")
}()
return nil
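The /metrics wiring differs between the two sides of this hunk: promhttp.Handler() serves the process-wide default registry, while promhttp.HandlerFor serves only the gatherer it is given (the scheduler's own Metrics.Registry in one version). A short sketch of the explicit form, assuming a dedicated registry created locally:

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	registry := prometheus.NewRegistry()

	// Serves only metrics registered on this registry; handler-level errors
	// are also counted against it via HandlerOpts.Registry.
	http.Handle("/metrics", promhttp.HandlerFor(
		registry,
		promhttp.HandlerOpts{Registry: registry},
	))

	// The simpler alternative, promhttp.Handler(), would expose the global
	// default registry instead:
	// http.Handle("/metrics", promhttp.Handler())

	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}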


@ -5,7 +5,6 @@ import (
"log"
"os"
"os/exec"
"sort"
"strings"
)
@ -40,7 +39,6 @@ func NewCapturedLogWriter(logger *log.Logger) *CapturedLogWriter {
return &CapturedLogWriter{Lines: []string{}, logger: logger}
}
// Write writes the provided byte slice to the logger and stores each captured line.
func (w *CapturedLogWriter) Write(content []byte) (n int, err error) {
message := string(content)
for _, line := range strings.Split(message, "\n") {
@ -51,33 +49,6 @@ func (w *CapturedLogWriter) Write(content []byte) (n int, err error) {
return len(content), nil
}
// LinesMergedWith returns a slice of lines from this logger merged with another.
func (w CapturedLogWriter) LinesMergedWith(other CapturedLogWriter) []string {
allLines := []string{}
allLines = append(allLines, w.Lines...)
allLines = append(allLines, other.Lines...)
sort.Strings(allLines)
return allLines
}
type CapturedCommandLogWriter struct {
Stdout *CapturedLogWriter
Stderr *CapturedLogWriter
}
func NewCapturedCommandLogWriter(logger *log.Logger) *CapturedCommandLogWriter {
return &CapturedCommandLogWriter{
Stdout: NewCapturedLogWriter(logger),
Stderr: NewCapturedLogWriter(logger),
}
}
func (cclw CapturedCommandLogWriter) AllLines() []string {
return cclw.Stdout.LinesMergedWith(*cclw.Stderr)
}
func RunShell(script string, cwd string, env map[string]string, logger *log.Logger) error {
cmd := exec.Command("sh", "-c", strings.TrimSpace(script)) //nolint:gosec
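CapturedCommandLogWriter keeps stdout and stderr as separate line streams, which is what lets one version of RunRestic check specifically for restic's "Is there a repository at the following location?" message on stderr while still logging all output. A reduced sketch of the same idea using only the standard library (plain buffers instead of the repository's logging writers; it assumes a restic binary on PATH):

package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Capture stdout and stderr separately so stderr can be inspected on
	// failure while stdout stays clean for parsing.
	var stdout, stderr bytes.Buffer
	cmd := exec.Command("restic", "snapshots", "--json")
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		if strings.Contains(stderr.String(), "Is there a repository at the following location?") {
			fmt.Println("repository not found or uninitialized")
			return
		}
		fmt.Println("restic error:", err)
		return
	}

	fmt.Println(stdout.String())
}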

tasks.go: 192 changed lines

@ -67,7 +67,7 @@ func (t *JobTaskScript) SetName(name string) {
t.name = name
}
// JobTaskMySQL is a MySQL backup task that performs required pre and post tasks.
// JobTaskMySQL is a sqlite backup task that performs required pre and post tasks.
type JobTaskMySQL struct {
Port int `hcl:"port,optional"`
Name string `hcl:"name,label"`
@ -89,16 +89,11 @@ func (t JobTaskMySQL) Validate() error {
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
}
if stat, err := os.Stat(t.DumpToPath); err != nil {
if s, err := os.Stat(t.DumpToPath); err != nil {
if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf(
"task %s: invalid dump_to: could not stat path: %s: %w",
t.Name,
t.DumpToPath,
ErrInvalidConfigValue,
)
return fmt.Errorf("task %s: invalid dump_to: could not stat path: %v: %w", t.Name, err, ErrInvalidConfigValue)
}
} else if stat.Mode().IsDir() {
} else if s.Mode().IsDir() {
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
}
@ -160,20 +155,12 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
command = append(command, "--host", t.Hostname)
}
if t.Port != 0 {
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
}
if t.Username != "" {
command = append(command, "--user", t.Username)
}
if t.Password != "" {
command = append(command, fmt.Sprintf("--password=%s", t.Password))
}
if t.Database != "" {
command = append(command, t.Database)
command = append(command, "--password", t.Password)
}
command = append(command, "<", t.DumpToPath)
@ -187,144 +174,6 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
}
}
// JobTaskPostgres is a postgres backup task that performs required pre and post tasks.
type JobTaskPostgres struct {
Port int `hcl:"port,optional"`
Name string `hcl:"name,label"`
Hostname string `hcl:"hostname,optional"`
Database string `hcl:"database,optional"`
Username string `hcl:"username,optional"`
Password string `hcl:"password,optional"`
Tables []string `hcl:"tables,optional"`
DumpToPath string `hcl:"dump_to"`
NoTablespaces bool `hcl:"no_tablespaces,optional"`
Clean bool `hcl:"clean,optional"`
Create bool `hcl:"create,optional"`
}
func (t JobTaskPostgres) Paths() []string {
return []string{t.DumpToPath}
}
func (t JobTaskPostgres) Validate() error {
if t.DumpToPath == "" {
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
}
if stat, err := os.Stat(t.DumpToPath); err != nil {
if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf(
"task %s: invalid dump_to: could not stat path: %s: %w",
t.Name,
t.DumpToPath,
ErrInvalidConfigValue,
)
}
} else if stat.Mode().IsDir() {
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
}
if len(t.Tables) > 0 && t.Database == "" {
return fmt.Errorf(
"task %s is invalid. Must specify a database to use tables: %w",
t.Name,
ErrMissingField,
)
}
return nil
}
//nolint:cyclop
func (t JobTaskPostgres) GetPreTask() ExecutableTask {
command := []string{"pg_dump"}
if t.Database == "" {
command = []string{"pg_dumpall"}
}
command = append(command, "--file", t.DumpToPath)
if t.Hostname != "" {
command = append(command, "--host", t.Hostname)
}
if t.Port != 0 {
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
}
if t.Username != "" {
command = append(command, "--username", t.Username)
}
if t.NoTablespaces {
command = append(command, "--no-tablespaces")
}
if t.Clean {
command = append(command, "--clean")
}
if t.Create {
command = append(command, "--create")
}
for _, table := range t.Tables {
command = append(command, "--table", table)
}
if t.Database != "" {
command = append(command, t.Database)
}
env := map[string]string{}
if t.Password != "" {
env["PGPASSWORD"] = t.Password
}
return JobTaskScript{
name: t.Name,
env: env,
Cwd: ".",
OnBackup: strings.Join(command, " "),
OnRestore: "",
}
}
func (t JobTaskPostgres) GetPostTask() ExecutableTask {
command := []string{"psql"}
if t.Hostname != "" {
command = append(command, "--host", t.Hostname)
}
if t.Port != 0 {
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
}
if t.Username != "" {
command = append(command, "--username", t.Username)
}
if t.Database != "" {
command = append(command, t.Database)
}
command = append(command, "<", t.DumpToPath)
env := map[string]string{}
if t.Password != "" {
env["PGPASSWORD"] = t.Password
}
return JobTaskScript{
name: t.Name,
env: env,
Cwd: ".",
OnBackup: "",
OnRestore: strings.Join(command, " "),
}
}
// JobTaskSqlite is a sqlite backup task that performs required pre and post tasks.
type JobTaskSqlite struct {
Name string `hcl:"name,label"`
@ -341,16 +190,11 @@ func (t JobTaskSqlite) Validate() error {
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
}
if stat, err := os.Stat(t.DumpToPath); err != nil {
if s, err := os.Stat(t.DumpToPath); err != nil {
if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf(
"task %s: invalid dump_to: could not stat path: %s: %w",
t.Name,
t.DumpToPath,
ErrInvalidConfigValue,
)
return fmt.Errorf("task %s: invalid dump_to: could not stat path: %v: %w", t.Name, err, ErrInvalidConfigValue)
}
} else if stat.Mode().IsDir() {
} else if s.Mode().IsDir() {
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
}
@ -382,7 +226,6 @@ type BackupFilesTask struct {
BackupOpts *BackupOpts `hcl:"backup_opts,block"`
RestoreOpts *RestoreOpts `hcl:"restore_opts,block"`
name string
snapshot string
}
func (t BackupFilesTask) RunBackup(cfg TaskConfig) error {
@ -405,11 +248,8 @@ func (t BackupFilesTask) RunRestore(cfg TaskConfig) error {
t.RestoreOpts = &RestoreOpts{} //nolint:exhaustruct
}
if t.snapshot == "" {
t.snapshot = "latest"
}
if err := cfg.Restic.Restore(t.snapshot, *t.RestoreOpts); err != nil {
// TODO: Make the snapshot configurable
if err := cfg.Restic.Restore("latest", *t.RestoreOpts); err != nil {
err = fmt.Errorf("failed restoring paths: %w", err)
cfg.Logger.Print(err)
@ -437,16 +277,14 @@ func (t *BackupFilesTask) Validate() error {
// JobTask represents a single task within a backup job.
type JobTask struct {
Name string `hcl:"name,label"`
PreScripts []JobTaskScript `hcl:"pre_script,block"`
PostScripts []JobTaskScript `hcl:"post_script,block"`
MySQL []JobTaskMySQL `hcl:"mysql,block"`
Postgres []JobTaskPostgres `hcl:"postgres,block"`
Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
Name string `hcl:"name,label"`
PreScripts []JobTaskScript `hcl:"pre_script,block"`
PostScripts []JobTaskScript `hcl:"post_script,block"`
MySQL []JobTaskMySQL `hcl:"mysql,block"`
Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
}
func (t JobTask) Validate() error {
// NOTE: Might make task types mutually exclusive because order is confusing even if deterministic
if t.Name == "" {
return fmt.Errorf("task is missing a name: %w", ErrMissingField)
}


@ -163,29 +163,7 @@ func TestJobTaskSql(t *testing.T) {
" --user user --password=pass --no-tablespaces db table1 table2",
postBackup: "",
preRestore: "",
postRestore: "mysql --host host --port 3306 --user user --password=pass db < ./simple.sql",
},
{
name: "psql all",
task: main.JobTaskPostgres{
Name: "simple",
Hostname: "host",
Port: 6543,
Username: "user",
Password: "pass",
Database: "db",
NoTablespaces: true,
Create: true,
Clean: true,
Tables: []string{"table1", "table2"},
DumpToPath: "./simple.sql",
},
validationErr: nil,
preBackup: "pg_dump --file ./simple.sql --host host --port 6543 --username user --no-tablespaces" +
" --clean --create --table table1 --table table2 db",
postBackup: "",
preRestore: "",
postRestore: "psql --host host --port 6543 --username user db < ./simple.sql",
postRestore: "mysql --host host --user user --password pass < ./simple.sql",
},
// Sqlite
{