Compare commits

..

2 Commits

ecf375511c  Pin Alpine version to 3.16  (2022-11-28 07:09:14 -08:00)
  All checks were successful: continuous-integration/drone/push build is passing
  Package versions always break in builds when a new Alpine version comes
  out. I'm going to start pinning these.

e2c71d8e25  Add support for prometheus push gateway on single runs  (2022-11-14 15:33:26 -08:00)
  Some checks failed: continuous-integration/drone/push build is failing
25 changed files with 155 additions and 976 deletions


@ -4,7 +4,7 @@ name: test
steps: steps:
- name: test - name: test
image: golang:1.21 image: golang:1.17
environment: environment:
VERSION: ${DRONE_TAG:-${DRONE_COMMIT}} VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
commands: commands:
@ -13,7 +13,7 @@ steps:
- make test - make test
- name: check - name: check
image: iamthefij/drone-pre-commit@sha256:30fa17489b86d7a4c3ad9c3ce2e152c25d82b8671e5609d322c6cae0baed89cd image: iamthefij/drone-pre-commit:personal
--- ---
kind: pipeline kind: pipeline
@ -32,7 +32,7 @@ trigger:
steps: steps:
- name: build all binaries - name: build all binaries
image: golang:1.21 image: golang:1.17
environment: environment:
VERSION: ${DRONE_TAG:-${DRONE_COMMIT}} VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
commands: commands:
@ -65,7 +65,7 @@ steps:
- name: push images - name: push images
image: thegeeklab/drone-docker-buildx image: thegeeklab/drone-docker-buildx
settings: settings:
repo: iamthefij/restic-scheduler repo: iamthefij/resticscheduler
auto_tag: true auto_tag: true
platforms: platforms:
- linux/amd64 - linux/amd64

.gitignore (vendored): 5 changes

@ -20,9 +20,4 @@ dist/
# Built executable # Built executable
restic-scheduler restic-scheduler
resticscheduler
data/ data/
# Itest temp dirs
itest/data
itest/repo


@ -16,6 +16,7 @@ linters:
- contextcheck - contextcheck
- cyclop - cyclop
- decorder - decorder
- depguard
- dupl - dupl
- durationcheck - durationcheck
- errchkjson - errchkjson
@ -31,8 +32,10 @@ linters:
- gocognit - gocognit
- goconst - goconst
- gocritic - gocritic
# - gocyclo # Using cyclop
- godot - godot
- gofumpt # - goerr113 # Using errorlint
- gofmt
- goheader - goheader
- goimports - goimports
- gomnd - gomnd
@ -42,6 +45,7 @@ linters:
- gosec - gosec
- grouper - grouper
- importas - importas
# - ireturn
- lll - lll
- maintidx - maintidx
- makezero - makezero
@ -56,9 +60,11 @@ linters:
- paralleltest - paralleltest
- prealloc - prealloc
- predeclared - predeclared
# - promlinter # Not common enough
- revive - revive
- rowserrcheck - rowserrcheck
- sqlclosecheck - sqlclosecheck
# - stylecheck # Using revive
- tagliatelle - tagliatelle
- tenv - tenv
- testpackage - testpackage
@ -66,11 +72,27 @@ linters:
- tparallel - tparallel
- unconvert - unconvert
- unparam - unparam
- varnamelen
- wastedassign - wastedassign
- whitespace - whitespace
- wrapcheck - wrapcheck
- wsl - wsl
disable:
- gochecknoglobals
- godox
- forbidigo
# Deprecated
- golint
- interfacer
- maligned
- scopelint
- ifshort
- varcheck
- structcheck
- deadcode
- exhaustivestruct
linters-settings: linters-settings:
gomnd: gomnd:
settings: settings:


@ -1,7 +1,7 @@
--- ---
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0 rev: v3.4.0
hooks: hooks:
- id: check-added-large-files - id: check-added-large-files
- id: check-yaml - id: check-yaml
@ -11,8 +11,10 @@ repos:
- id: end-of-file-fixer - id: end-of-file-fixer
- id: check-merge-conflict - id: check-merge-conflict
- repo: https://github.com/dnephin/pre-commit-golang - repo: https://github.com/dnephin/pre-commit-golang
rev: v0.5.1 rev: v0.4.0
hooks: hooks:
- id: go-fmt
- id: go-imports
- id: golangci-lint - id: golangci-lint
args: args:
- --timeout=3m - --timeout=3m


@ -1,23 +1,18 @@
FROM alpine:3.18 FROM alpine:3.16
RUN apk add --no-cache \ RUN apk add --no-cache \
bash~=5 \ bash~=5 \
consul~=1 \ consul~=1.12 \
mariadb-client~=10 \ mariadb-client~=10.6 \
mariadb-connector-c~=3 \ mariadb-connector-c~=3.1 \
nomad~=1 \ rclone~=1.58 \
postgresql15-client~=15 \ redis~=7.0 \
rclone~=1.62 \ restic~=0.13 \
redis~=7 \
restic~=0.15 \
sqlite~=3 \ sqlite~=3 \
tzdata~=2024 \
; ;
ARG TARGETOS ARG TARGETOS
ARG TARGETARCH ARG TARGETARCH
COPY ./dist/restic-scheduler-$TARGETOS-$TARGETARCH /bin/restic-scheduler COPY ./dist/resticscheduler-$TARGETOS-$TARGETARCH /bin/resticscheduler
HEALTHCHECK CMD ["wget", "-O", "-", "http://localhost:8080/health"] ENTRYPOINT [ "/bin/resticscheduler" ]
ENTRYPOINT [ "/bin/restic-scheduler" ]


@ -1,10 +1,11 @@
APP_NAME = restic-scheduler APP_NAME = resticscheduler
VERSION ?= $(shell git describe --tags --dirty) VERSION ?= $(shell git describe --tags --dirty)
GOFILES = *.go GOFILES = *.go
# Multi-arch targets are generated from this # Multi-arch targets are generated from this
TARGET_ALIAS = $(APP_NAME)-linux-amd64 $(APP_NAME)-linux-arm $(APP_NAME)-linux-arm64 TARGET_ALIAS = $(APP_NAME)-linux-amd64 $(APP_NAME)-linux-arm $(APP_NAME)-linux-arm64 $(APP_NAME)-darwin-amd64 $(APP_NAME)-darwin-arm64
TARGETS = $(addprefix dist/,$(TARGET_ALIAS)) TARGETS = $(addprefix dist/,$(TARGET_ALIAS))
CURRENT_GOARCH = $(shell go env GOARCH) .QUOTE = "
CURRENT_GOARCH = $(shell go env | awk -F "=" '/GOARCH/ { gsub(/$(.QUOTE)/,"", $$2); print $$2}')
# Default make target will run tests # Default make target will run tests
.DEFAULT_GOAL = test .DEFAULT_GOAL = test
@ -28,13 +29,9 @@ build: $(APP_NAME)
# Run all tests # Run all tests
.PHONY: test .PHONY: test
test: test:
go test -v -coverprofile=coverage.out # -short go test -coverprofile=coverage.out # -short
go tool cover -func=coverage.out go tool cover -func=coverage.out
.PHONY: itest
itest: docker-build
./itest/run.sh
# Installs pre-commit hooks # Installs pre-commit hooks
.PHONY: install-hooks .PHONY: install-hooks
install-hooks: install-hooks:
@ -54,7 +51,7 @@ clean:
## Multi-arch targets ## Multi-arch targets
$(TARGETS): $(GOFILES) $(TARGETS): $(GOFILES)
mkdir -p ./dist mkdir -p ./dist
GOOS=$(word 3, $(subst -, ,$(@))) GOARCH=$(word 4, $(subst -, ,$(@))) CGO_ENABLED=0 \ GOOS=$(word 2, $(subst -, ,$(@))) GOARCH=$(word 3, $(subst -, ,$(@))) CGO_ENABLED=0 \
go build -ldflags '-X "main.version=$(VERSION)"' -a -installsuffix nocgo \ go build -ldflags '-X "main.version=$(VERSION)"' -a -installsuffix nocgo \
-o $@ -o $@

README.md: 210 changes

@ -1,211 +1,3 @@
# [restic-scheduler](/iamthefij/restic-scheduler) # [restic-scheduler](/iamthefij/restic-scheduler)
## About Job scheduler for Restic backups
`restic-scheduler` lets you schedule restic backups declaratively using HCL (HashiCorp Configuration Language). It simplifies managing and automating backups by defining jobs in a configuration file.
## Getting Started
### Installation
You can install `restic-scheduler` using the following command:
```sh
go install git.iamthefij.com/iamthefij/restic-scheduler@latest
```
You can also download the latest release from the [releases page](https://git.iamthefij.com/iamthefij/restic-scheduler/releases).
Finally, if you prefer to use Docker, you can run something like the following command:
```sh
docker run -v /path/to/config:/config -v /path/to/data:/data iamthefij/restic-scheduler -config /config/jobs.hcl
```
### Prerequisites
If you're not using Docker, you'll need to ensure that `restic` is installed and available in your system's PATH. You can download and install restic from [here](https://restic.net/).
## Usage
### Command Line Interface
The `restic-scheduler` command line interface provides several options for managing backup, restore, and unlock jobs. Below are some examples of how to use this tool.
#### Display Version
To display the version of `restic-scheduler`, use the `-version` flag:
```sh
restic-scheduler -version
```
#### Run Backup Jobs
To run backup jobs, use the `-backup` flag followed by a comma-separated list of job names. Use `all` to run all backup jobs:
```sh
restic-scheduler -backup job1,job2
```
#### Run Restore Jobs
To run restore jobs, use the `-restore` flag followed by a comma-separated list of job names. Use `all` to run all restore jobs:
```sh
restic-scheduler -restore job1,job2
```
#### Unlock Job Repositories
To unlock job repositories, use the `-unlock` flag followed by a comma-separated list of job names. Use `all` to unlock all job repositories:
```sh
restic-scheduler -unlock job1,job2
```
#### Run Jobs Once and Exit
To run specified backup and restore jobs once and exit, use the `-once` flag:
```sh
restic-scheduler -backup job1 -restore job2 -once
```
#### Health Check and Metrics API
To bind the health check and Prometheus metrics API to a specific address, use the `-addr` flag:
```sh
restic-scheduler -addr 0.0.0.0:8080
```
#### Metrics Push Gateway
To specify the URL of a Prometheus push gateway service for batch runs, use the `-push-gateway` flag:
```sh
restic-scheduler -push-gateway http://example.com
```
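For example, a single batch run that backs up all jobs and pushes its metrics before exiting might look like the following (the gateway address here is only a placeholder):
```sh
# Run every backup job once, push metrics to the gateway, then exit
restic-scheduler -once -backup all -push-gateway http://pushgateway.example.com:9091 jobs.hcl
```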
## HCL Configuration
The configuration for `restic-scheduler` is defined using HCL. Below is a description and example of how to define a backup job in the configuration file.
### Job Configuration
A job in the configuration file is defined using the `job` block. Each job must have a unique name, a schedule, and a configuration for restic. Additionally, tasks can be defined to perform specific actions before and after the backup.
#### Fields
- `name`: The name of the job.
- `schedule`: The cron schedule for the job.
- `config`: The restic configuration block.
- `repo`: The restic repository.
- `passphrase`: (Optional) The passphrase for the repository.
- `env`: (Optional) Environment variables for restic.
- `options`: (Optional) Global options for restic. See the `restic` command for details.
- `task`: (Optional) A list of tasks to run before and after the backup.
- `mysql`, `postgres`, `sqlite`: (Optional) Database-specific tasks.
- `backup`: The backup configuration block.
- `forget`: (Optional) Options for forgetting old snapshots.
### Example
Below is an example of a job configuration in HCL:
```hcl
// Example job file
job "MyApp" {
schedule = "* * * * *"
config {
repo = "s3://..."
passphrase = "foo"
# Some alternate ways to pass the passphrase to restic
# passphrase = env("RESTIC_PASSWORD")
# passphrase = readfile("/path/to/passphrase")
env = {
"foo" = "bar",
}
options {
VerboseLevel = 3
# Another alternate way to pass the passphrase to restic
# PasswordFile = "/path/to/passphrase"
}
}
mysql "DumpMainDB" {
hostname = "foo"
username = "bar"
dump_to = "/data/main.sql"
}
sqlite "DumpSqlite" {
path = "/db/sqlite.db"
dump_to = "/data/sqlite.db.bak"
}
task "Create biz file" {
pre_script {
on_backup = <<EOF
echo bar >> /biz.txt
EOF
}
post_script {
on_backup = <<EOF
rm /biz.txt
EOF
}
}
task "Run restore shell script" {
pre_script {
on_restore = "/foo/bar.sh"
}
}
backup {
files =[
"/data",
"/biz.txt",
]
backup_opts {
Tags = ["service"]
}
restore_opts {
Verify = true
# Since paths are absolute, restore to root
Target = "/"
}
}
forget {
KeepLast = 3
KeepWeekly = 2
KeepMonthly = 2
KeepYearly = 2
Prune = true
}
}
```
To start the scheduler, pass your job file as an argument:
```sh
restic-scheduler jobs.hcl
```
This will read the job definitions from `jobs.hcl` and execute the specified jobs.
For more examples, check out `./config.hcl` or some of the example integration test configs in `./test/`.
## Contributing
Contributions are welcome! Please open an issue or submit a pull request on the [GitHub repository](https://git.iamthefij.com/iamthefij/restic-scheduler).
## License
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.

go.mod: 2 changes

@ -1,6 +1,6 @@
module git.iamthefij.com/iamthefij/restic-scheduler module git.iamthefij.com/iamthefij/restic-scheduler
go 1.20 go 1.17
require ( require (
github.com/go-test/deep v1.0.8 github.com/go-test/deep v1.0.8


@ -1,46 +0,0 @@
#! /bin/sh
set -ex
# Create flat file
echo "Hello" > /data/test.txt
# Create Sqlite database
touch /data/test_database.db
sqlite3 /data/test_database.db <<-EOF
CREATE TABLE test_table (
id INTEGER PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ("Test row");
EOF
# Create MySql database
until mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" --execute "SHOW DATABASES;"; do
sleep 1
done
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF
CREATE TABLE test_table (
id INTEGER AUTO_INCREMENT PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ("Test row");
EOF
# Create Postgres database
export PGPASSWORD="$PGSQL_PASS"
until psql --host "$PGSQL_HOST" --username "$PGSQL_USER" --command "SELECT datname FROM pg_database;"; do
sleep 1
done
psql -v ON_ERROR_STOP=1 --host "$PGSQL_HOST" --username "$PGSQL_USER" main <<EOF
CREATE TABLE test_table (
id SERIAL PRIMARY KEY,
data TEXT NOT NULL
);
INSERT INTO test_table(data)
VALUES ('Test row');
EOF


@ -1,57 +0,0 @@
---
version: "3.9"
services:
mysql:
image: mysql
environment:
MYSQL_ROOT_PASSWORD: shhh
MYSQL_DATABASE: main
postgres:
image: postgres
environment:
POSTGRES_PASSWORD: shhh
POSTGRES_DB: main
bootstrap:
image: restic-scheduler
entrypoint: /bootstrap-tests.sh
environment:
MYSQL_HOST: mysql
MYSQL_USER: root
MYSQL_PWD: shhh
PGSQL_HOST: postgres
PGSQL_USER: postgres
PGSQL_PASS: shhh
volumes:
- ./bootstrap-tests.sh:/bootstrap-tests.sh
- ./data:/data
main:
image: restic-scheduler
environment:
MYSQL_HOST: mysql
MYSQL_USER: root
MYSQL_PWD: shhh
PGSQL_HOST: postgres
PGSQL_USER: postgres
PGSQL_PASS: shhh
volumes:
- ./repo:/repo
- ./data:/data
- ./test-backup.hcl:/test-backup.hcl
validate:
image: restic-scheduler
entrypoint: /validate-tests.sh
environment:
MYSQL_HOST: mysql
MYSQL_USER: root
MYSQL_PWD: shhh
PGSQL_HOST: postgres
PGSQL_USER: postgres
PGSQL_PASS: shhh
volumes:
- ./validate-tests.sh:/validate-tests.sh
- ./data:/data


@ -1,35 +0,0 @@
#! /bin/bash
set -ex
cd "$(dirname "$0")"
mkdir -p ./repo ./data
echo Clean everything
docker-compose down -v
rm -fr ./repo/* ./data/*
sleep 5
echo Boostrap databases and data
docker-compose up -d mysql postgres
docker-compose run bootstrap
sleep 1
echo Run backup job
docker-compose run main -backup IntegrationTest -once /test-backup.hcl
echo Clean data
docker-compose down -v
docker-compose up -d mysql postgres
rm -fr ./data/*
sleep 15
echo Run restore
docker-compose run main -restore IntegrationTest -once /test-backup.hcl
sleep 1
echo Validate data
docker-compose run validate
echo Clean all again
docker-compose down -v
rm -fr ./repo/* ./data/*


@ -1,38 +0,0 @@
job "IntegrationTest" {
schedule = "@daily"
config {
repo = "/repo"
passphrase = "shh"
}
mysql "MySQL" {
hostname = env("MYSQL_HOST")
database = "main"
username = env("MYSQL_USER")
password = env("MYSQL_PWD")
dump_to = "/tmp/mysql.sql"
}
postgres "Postgres" {
hostname = env("PGSQL_HOST")
database = "main"
username = env("PGSQL_USER")
password = env("PGSQL_PASS")
create = true
dump_to = "/tmp/psql.sql"
}
sqlite "SQLite" {
path = "/data/test_database.db"
dump_to = "/data/test_database.db.bak"
}
backup {
paths = ["/data"]
restore_opts {
Target = "/"
}
}
}


@ -1,21 +0,0 @@
#! /bin/sh
set -ex
# Check flat file
test -f /data/test.txt
grep "^Hello" /data/test.txt
# Check Sqlite database
test -f /data/test_database.db
sqlite3 /data/test_database.db "select data from test_table where id = 1" | grep "^Test row"
# Check MySql database
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF | grep "^Test row"
select data from test_table where id = 1;
EOF
# Check Postgres database
export PGPASSWORD="$PGSQL_PASS"
psql --host "$PGSQL_HOST" --user "$PGSQL_USER" main <<EOF | grep "Test row"
select data from test_table where id = 1;
EOF

job.go: 69 changes

@ -52,16 +52,13 @@ func (r ResticConfig) Validate() error {
type Job struct { type Job struct {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
Schedule string `hcl:"schedule"` Schedule string `hcl:"schedule"`
Config *ResticConfig `hcl:"config,block"` Config ResticConfig `hcl:"config,block"`
Tasks []JobTask `hcl:"task,block"` Tasks []JobTask `hcl:"task,block"`
Backup BackupFilesTask `hcl:"backup,block"` Backup BackupFilesTask `hcl:"backup,block"`
Forget *ForgetOpts `hcl:"forget,block"` Forget *ForgetOpts `hcl:"forget,block"`
// Meta Tasks // Meta Tasks
// NOTE: Now that these are also available within a task
// these could be removed to make task order more obvious
MySQL []JobTaskMySQL `hcl:"mysql,block"` MySQL []JobTaskMySQL `hcl:"mysql,block"`
Postgres []JobTaskPostgres `hcl:"postgres,block"`
Sqlite []JobTaskSqlite `hcl:"sqlite,block"` Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
// Metrics and health // Metrics and health
@ -76,24 +73,6 @@ func (j Job) validateTasks() error {
} }
} }
for _, mysql := range j.MySQL {
if err := mysql.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
for _, pg := range j.Postgres {
if err := pg.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
for _, sqlite := range j.Sqlite {
if err := sqlite.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
return nil return nil
} }
@ -103,11 +82,7 @@ func (j Job) Validate() error {
} }
if _, err := cron.ParseStandard(j.Schedule); err != nil { if _, err := cron.ParseStandard(j.Schedule); err != nil {
return fmt.Errorf("job %s has an invalid schedule: %w: %w", j.Name, err, ErrInvalidConfigValue) return fmt.Errorf("job %s has an invalid schedule: %v: %w", j.Name, err, ErrInvalidConfigValue)
}
if j.Config == nil {
return fmt.Errorf("job %s is missing restic config: %w", j.Name, ErrMissingField)
} }
if err := j.Config.Validate(); err != nil { if err := j.Config.Validate(); err != nil {
@ -118,6 +93,18 @@ func (j Job) Validate() error {
return err return err
} }
for _, mysql := range j.MySQL {
if err := mysql.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
for _, sqlite := range j.Sqlite {
if err := sqlite.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
}
}
if err := j.Backup.Validate(); err != nil { if err := j.Backup.Validate(); err != nil {
return fmt.Errorf("job %s has an invalid backup config: %w", j.Name, err) return fmt.Errorf("job %s has an invalid backup config: %w", j.Name, err)
} }
@ -133,10 +120,6 @@ func (j Job) AllTasks() []ExecutableTask {
allTasks = append(allTasks, mysql.GetPreTask()) allTasks = append(allTasks, mysql.GetPreTask())
} }
for _, pg := range j.Postgres {
allTasks = append(allTasks, pg.GetPreTask())
}
for _, sqlite := range j.Sqlite { for _, sqlite := range j.Sqlite {
allTasks = append(allTasks, sqlite.GetPreTask()) allTasks = append(allTasks, sqlite.GetPreTask())
} }
@ -157,10 +140,6 @@ func (j Job) AllTasks() []ExecutableTask {
allTasks = append(allTasks, mysql.GetPostTask()) allTasks = append(allTasks, mysql.GetPostTask())
} }
for _, pg := range j.Postgres {
allTasks = append(allTasks, pg.GetPostTask())
}
for _, sqlite := range j.Sqlite { for _, sqlite := range j.Sqlite {
allTasks = append(allTasks, sqlite.GetPostTask()) allTasks = append(allTasks, sqlite.GetPostTask())
} }
@ -175,10 +154,6 @@ func (j Job) BackupPaths() []string {
paths = append(paths, t.DumpToPath) paths = append(paths, t.DumpToPath)
} }
for _, t := range j.Postgres {
paths = append(paths, t.DumpToPath)
}
for _, t := range j.Sqlite { for _, t := range j.Sqlite {
paths = append(paths, t.DumpToPath) paths = append(paths, t.DumpToPath)
} }
@ -222,7 +197,7 @@ func (j Job) Logger() *log.Logger {
return GetLogger(j.Name) return GetLogger(j.Name)
} }
func (j Job) RunRestore(snapshot string) error { func (j Job) RunRestore() error {
logger := j.Logger() logger := j.Logger()
restic := j.NewRestic() restic := j.NewRestic()
@ -238,10 +213,6 @@ func (j Job) RunRestore(snapshot string) error {
Env: nil, Env: nil,
} }
if backupTask, ok := exTask.(BackupFilesTask); ok {
backupTask.snapshot = snapshot
}
if err := exTask.RunRestore(taskCfg); err != nil { if err := exTask.RunRestore(taskCfg); err != nil {
return fmt.Errorf("failed running job %s: %w", j.Name, err) return fmt.Errorf("failed running job %s: %w", j.Name, err)
} }
@ -280,11 +251,9 @@ func (j Job) Run() {
result.LastError = err result.LastError = err
} else { } else {
Metrics.SnapshotCurrentCount.WithLabelValues(j.Name).Set(float64(len(snapshots))) Metrics.SnapshotCurrentCount.WithLabelValues(j.Name).Set(float64(len(snapshots)))
if len(snapshots) > 0 {
latestSnapshot := snapshots[len(snapshots)-1] latestSnapshot := snapshots[len(snapshots)-1]
Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix())) Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix()))
} }
}
if result.Success { if result.Success {
Metrics.JobFailureCount.WithLabelValues(j.Name).Set(0.0) Metrics.JobFailureCount.WithLabelValues(j.Name).Set(0.0)
@ -307,7 +276,7 @@ func (j Job) NewRestic() *Restic {
} }
type Config struct { type Config struct {
DefaultConfig *ResticConfig `hcl:"default_config,block"` // GlobalConfig *ResticConfig `hcl:"global_config,block"`
Jobs []Job `hcl:"job,block"` Jobs []Job `hcl:"job,block"`
} }
@ -317,12 +286,6 @@ func (c Config) Validate() error {
} }
for _, job := range c.Jobs { for _, job := range c.Jobs {
// Use default restic config if no job config is provided
// TODO: Maybe merge values here
if job.Config == nil {
job.Config = c.DefaultConfig
}
if err := job.Validate(); err != nil { if err := job.Validate(); err != nil {
return err return err
} }


@ -7,8 +7,8 @@ import (
main "git.iamthefij.com/iamthefij/restic-scheduler" main "git.iamthefij.com/iamthefij/restic-scheduler"
) )
func ValidResticConfig() *main.ResticConfig { func ValidResticConfig() main.ResticConfig {
return &main.ResticConfig{ return main.ResticConfig{
Passphrase: "shh", Passphrase: "shh",
Repo: "./data", Repo: "./data",
Env: nil, Env: nil,
@ -92,7 +92,6 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}, },
expectedErr: nil, expectedErr: nil,
@ -107,7 +106,6 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}, },
expectedErr: main.ErrMissingField, expectedErr: main.ErrMissingField,
@ -122,7 +120,6 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}, },
expectedErr: main.ErrInvalidConfigValue, expectedErr: main.ErrInvalidConfigValue,
@ -132,12 +129,11 @@ func TestJobValidation(t *testing.T) {
job: main.Job{ job: main.Job{
Name: "Test job", Name: "Test job",
Schedule: "@daily", Schedule: "@daily",
Config: &main.ResticConfig{}, //nolint:exhaustruct Config: main.ResticConfig{}, //nolint:exhaustruct
Tasks: []main.JobTask{}, Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}, },
expectedErr: main.ErrMutuallyExclusive, expectedErr: main.ErrMutuallyExclusive,
@ -148,13 +144,10 @@ func TestJobValidation(t *testing.T) {
Name: "Test job", Name: "Test job",
Schedule: "@daily", Schedule: "@daily",
Config: ValidResticConfig(), Config: ValidResticConfig(),
Tasks: []main.JobTask{ Tasks: []main.JobTask{{}},
{}, //nolint:exhaustruct
},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}, },
expectedErr: main.ErrMissingField, expectedErr: main.ErrMissingField,
@ -168,10 +161,7 @@ func TestJobValidation(t *testing.T) {
Tasks: []main.JobTask{}, Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{ MySQL: []main.JobTaskMySQL{{}},
{}, //nolint:exhaustruct
},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}, },
expectedErr: main.ErrMissingField, expectedErr: main.ErrMissingField,
@ -186,10 +176,7 @@ func TestJobValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{}, Sqlite: []main.JobTaskSqlite{{}},
Sqlite: []main.JobTaskSqlite{
{}, //nolint:exhaustruct
},
}, },
expectedErr: main.ErrMissingField, expectedErr: main.ErrMissingField,
}, },
@ -220,53 +207,25 @@ func TestConfigValidation(t *testing.T) {
}{ }{
{ {
name: "Valid job", name: "Valid job",
config: main.Config{ config: main.Config{Jobs: []main.Job{{
DefaultConfig: nil,
Jobs: []main.Job{{
Name: "Valid job", Name: "Valid job",
Schedule: "@daily", Schedule: "@daily",
Config: ValidResticConfig(), Config: ValidResticConfig(),
Tasks: []main.JobTask{}, Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}}, }}},
},
expectedErr: nil,
},
{
name: "Valid job with default config",
config: main.Config{
DefaultConfig: ValidResticConfig(),
Jobs: []main.Job{{
Name: "Valid job",
Schedule: "@daily",
Config: nil,
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}},
},
expectedErr: nil, expectedErr: nil,
}, },
{ {
name: "No jobs", name: "No jobs",
config: main.Config{ config: main.Config{Jobs: []main.Job{}},
DefaultConfig: nil,
Jobs: []main.Job{},
},
expectedErr: main.ErrNoJobsFound, expectedErr: main.ErrNoJobsFound,
}, },
{ {
name: "Invalid name", name: "Invalid name",
config: main.Config{ config: main.Config{Jobs: []main.Job{{
DefaultConfig: nil,
Jobs: []main.Job{{
Name: "", Name: "",
Schedule: "@daily", Schedule: "@daily",
Config: ValidResticConfig(), Config: ValidResticConfig(),
@ -274,28 +233,8 @@ func TestConfigValidation(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
}}, }}},
},
expectedErr: main.ErrMissingField,
},
{
name: "Missing config",
config: main.Config{
DefaultConfig: nil,
Jobs: []main.Job{{
Name: "",
Schedule: "@daily",
Config: nil,
Tasks: []main.JobTask{},
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil,
MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{},
}},
},
expectedErr: main.ErrMissingField, expectedErr: main.ErrMissingField,
}, },
} }

main.go: 58 changes

@ -31,10 +31,6 @@ func ParseConfig(path string) ([]Job, error) {
Params: []function.Parameter{{ Params: []function.Parameter{{
Name: "var", Name: "var",
Type: cty.String, Type: cty.String,
AllowNull: false,
AllowUnknown: false,
AllowDynamicType: false,
AllowMarked: false,
}}, }},
VarParam: nil, VarParam: nil,
Type: function.StaticReturnType(cty.String), Type: function.StaticReturnType(cty.String),
@ -46,10 +42,6 @@ func ParseConfig(path string) ([]Job, error) {
Params: []function.Parameter{{ Params: []function.Parameter{{
Name: "path", Name: "path",
Type: cty.String, Type: cty.String,
AllowNull: false,
AllowUnknown: false,
AllowDynamicType: false,
AllowMarked: false,
}}, }},
VarParam: nil, VarParam: nil,
Type: function.StaticReturnType(cty.String), Type: function.StaticReturnType(cty.String),
@ -148,10 +140,6 @@ func FilterJobs(jobs []Job, names []string) ([]Job, error) {
} }
func runBackupJobs(jobs []Job, names string) error { func runBackupJobs(jobs []Job, names string) error {
if names == "" {
return nil
}
namesSlice := strings.Split(names, ",") namesSlice := strings.Split(names, ",")
if len(namesSlice) == 0 { if len(namesSlice) == 0 {
@ -168,11 +156,7 @@ func runBackupJobs(jobs []Job, names string) error {
return filterJobErr return filterJobErr
} }
func runRestoreJobs(jobs []Job, names string, snapshot string) error { func runRestoreJobs(jobs []Job, names string) error {
if names == "" {
return nil
}
namesSlice := strings.Split(names, ",") namesSlice := strings.Split(names, ",")
if len(namesSlice) == 0 { if len(namesSlice) == 0 {
@ -181,28 +165,7 @@ func runRestoreJobs(jobs []Job, names string, snapshot string) error {
jobs, filterJobErr := FilterJobs(jobs, namesSlice) jobs, filterJobErr := FilterJobs(jobs, namesSlice)
for _, job := range jobs { for _, job := range jobs {
if err := job.RunRestore(snapshot); err != nil { if err := job.RunRestore(); err != nil {
return err
}
}
return filterJobErr
}
func runUnlockJobs(jobs []Job, names string) error {
if names == "" {
return nil
}
namesSlice := strings.Split(names, ",")
if len(namesSlice) == 0 {
return nil
}
jobs, filterJobErr := FilterJobs(jobs, namesSlice)
for _, job := range jobs {
if err := job.NewRestic().Unlock(UnlockOpts{RemoveAll: true}); err != nil {
return err return err
} }
} }
@ -214,8 +177,6 @@ type Flags struct {
showVersion bool showVersion bool
backup string backup string
restore string restore string
unlock string
restoreSnapshot string
once bool once bool
healthCheckAddr string healthCheckAddr string
metricsPushGateway string metricsPushGateway string
@ -226,30 +187,23 @@ func readFlags() Flags {
flag.BoolVar(&flags.showVersion, "version", false, "Display the version and exit") flag.BoolVar(&flags.showVersion, "version", false, "Display the version and exit")
flag.StringVar(&flags.backup, "backup", "", "Run backup jobs now. Names are comma separated. `all` will run all.") flag.StringVar(&flags.backup, "backup", "", "Run backup jobs now. Names are comma separated. `all` will run all.")
flag.StringVar(&flags.restore, "restore", "", "Run restore jobs now. Names are comma separated. `all` will run all.") flag.StringVar(&flags.restore, "restore", "", "Run restore jobs now. Names are comma separated. `all` will run all.")
flag.StringVar(&flags.unlock, "unlock", "", "Unlock job repos now. Names are comma separated. `all` will run all.")
flag.BoolVar(&flags.once, "once", false, "Run jobs specified using -backup and -restore once and exit") flag.BoolVar(&flags.once, "once", false, "Run jobs specified using -backup and -restore once and exit")
flag.StringVar(&flags.healthCheckAddr, "addr", "0.0.0.0:8080", "address to bind health check API") flag.StringVar(&flags.healthCheckAddr, "addr", "0.0.0.0:8080", "address to bind health check API")
flag.StringVar(&flags.metricsPushGateway, "push-gateway", "", "url of push gateway service for batch runs (optional)") flag.StringVar(&flags.metricsPushGateway, "push-gateway", "", "url of push gateway service for batch runs (optional)")
flag.StringVar(&JobBaseDir, "base-dir", JobBaseDir, "Base dir to create intermediate job files like SQL dumps.") flag.StringVar(&JobBaseDir, "base-dir", JobBaseDir, "Base dir to create intermediate job files like SQL dumps.")
flag.StringVar(&flags.restoreSnapshot, "snapshot", "latest", "the snapshot to restore")
flag.Parse() flag.Parse()
return flags return flags
} }
func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot string) error { func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs string) error {
// Run specified job unlocks
if err := runUnlockJobs(jobs, unlockJobs); err != nil {
return fmt.Errorf("Failed running unlock for jobs: %w", err)
}
// Run specified backup jobs // Run specified backup jobs
if err := runBackupJobs(jobs, backupJobs); err != nil { if err := runBackupJobs(jobs, backupJobs); err != nil {
return fmt.Errorf("Failed running backup jobs: %w", err) return fmt.Errorf("Failed running backup jobs: %w", err)
} }
// Run specified restore jobs // Run specified restore jobs
if err := runRestoreJobs(jobs, restoreJobs, snapshot); err != nil { if err := runRestoreJobs(jobs, restoreJobs); err != nil {
return fmt.Errorf("Failed running restore jobs: %w", err) return fmt.Errorf("Failed running restore jobs: %w", err)
} }
@ -258,8 +212,6 @@ func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot
func maybePushMetrics(metricsPushGateway string) error { func maybePushMetrics(metricsPushGateway string) error {
if metricsPushGateway != "" { if metricsPushGateway != "" {
fmt.Println("Pushing metrics to push gateway")
if err := Metrics.PushToGateway(metricsPushGateway); err != nil { if err := Metrics.PushToGateway(metricsPushGateway); err != nil {
return fmt.Errorf("Failed pushing metrics after jobs run: %w", err) return fmt.Errorf("Failed pushing metrics after jobs run: %w", err)
} }
@ -291,7 +243,7 @@ func main() {
log.Fatalf("Failed to read jobs from files: %v", err) log.Fatalf("Failed to read jobs from files: %v", err)
} }
if err := runSpecifiedJobs(jobs, flags.backup, flags.restore, flags.unlock, flags.restoreSnapshot); err != nil { if err := runSpecifiedJobs(jobs, flags.backup, flags.restore); err != nil {
log.Fatal(err) log.Fatal(err)
} }


@ -18,9 +18,9 @@ func TestMain(m *testing.M) {
if testResult == 0 && testing.CoverMode() != "" { if testResult == 0 && testing.CoverMode() != "" {
c := testing.Coverage() c := testing.Coverage()
if c < MinCoverage { if c < MinCoverage {
fmt.Printf("WARNING: Tests passed but coverage failed at %0.2f and minimum to pass is %0.2f\n", c, MinCoverage) fmt.Printf("Tests passed but coverage failed at %0.2f and minimum to pass is %0.2f\n", c, MinCoverage)
testResult = 0 testResult = -1
} }
} }
@ -31,6 +31,7 @@ func TestReadJobs(t *testing.T) {
t.Parallel() t.Parallel()
jobs, err := main.ReadJobs([]string{"./test/sample.hcl"}) jobs, err := main.ReadJobs([]string{"./test/sample.hcl"})
if err != nil { if err != nil {
t.Errorf("Unexpected error reading jobs: %v", err) t.Errorf("Unexpected error reading jobs: %v", err)
} }
@ -51,7 +52,6 @@ func TestRunJobs(t *testing.T) {
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
Forget: nil, Forget: nil,
MySQL: []main.JobTaskMySQL{}, MySQL: []main.JobTaskMySQL{},
Postgres: []main.JobTaskPostgres{},
Sqlite: []main.JobTaskSqlite{}, Sqlite: []main.JobTaskSqlite{},
} }


@ -19,6 +19,7 @@ func (m ResticMetrics) PushToGateway(url string) error {
err := push.New(url, "batch"). err := push.New(url, "batch").
Gatherer(m.Registry). Gatherer(m.Registry).
Add() Add()
if err != nil { if err != nil {
return fmt.Errorf("error pushing to registry %s: %w", url, err) return fmt.Errorf("error pushing to registry %s: %w", url, err)
} }


@ -11,10 +11,18 @@ import (
"time" "time"
) )
var ( var ErrRestic = errors.New("restic error")
ErrRestic = errors.New("restic error") var ErrRepoNotFound = errors.New("repository not found or uninitialized")
ErrRepoNotFound = errors.Join(errors.New("repository not found or uninitialized"), ErrRestic)
) func lineIn(needle string, haystack []string) bool {
for _, line := range haystack {
if line == needle {
return true
}
}
return false
}
func maybeAddArgString(args []string, name, value string) []string { func maybeAddArgString(args []string, name, value string) []string {
if value != "" { if value != "" {
@ -48,41 +56,22 @@ func maybeAddArgsList(args []string, name string, value []string) []string {
return args return args
} }
// CommandOptions interface dictates a ToArgs() method should return each commandline arg as a string slice.
type CommandOptions interface { type CommandOptions interface {
// ToArgs returns the structs arguments as a slice of strings.
ToArgs() []string ToArgs() []string
} }
// GenericOpts allows passing an arbitrary string slice as a set of command line options compatible with CommandOptions.
type GenericOpts []string type GenericOpts []string
// ToArgs returns the structs arguments as a slice of strings.
func (o GenericOpts) ToArgs() []string { func (o GenericOpts) ToArgs() []string {
return o return o
} }
// NoOpts is a struct that fulfils the CommandOptions interface but provides no arguments.
type NoOpts struct{} type NoOpts struct{}
// ToArgs returns the structs arguments as a slice of strings.
func (NoOpts) ToArgs() []string { func (NoOpts) ToArgs() []string {
return []string{} return []string{}
} }
// UnlockOpts holds optional arguments for unlock command.
type UnlockOpts struct {
RemoveAll bool `hcl:"RemoveAll,optional"`
}
// ToArgs returns the structs arguments as a slice of strings.
func (uo UnlockOpts) ToArgs() (args []string) {
args = maybeAddArgBool(args, "--remove-all", uo.RemoveAll)
return
}
// BackupOpts holds optional arguments for the Restic backup command.
type BackupOpts struct { type BackupOpts struct {
Exclude []string `hcl:"Exclude,optional"` Exclude []string `hcl:"Exclude,optional"`
Include []string `hcl:"Include,optional"` Include []string `hcl:"Include,optional"`
@ -90,7 +79,6 @@ type BackupOpts struct {
Host string `hcl:"Host,optional"` Host string `hcl:"Host,optional"`
} }
// ToArgs returns the structs arguments as a slice of strings.
func (bo BackupOpts) ToArgs() (args []string) { func (bo BackupOpts) ToArgs() (args []string) {
args = maybeAddArgsList(args, "--exclude", bo.Exclude) args = maybeAddArgsList(args, "--exclude", bo.Exclude)
args = maybeAddArgsList(args, "--include", bo.Include) args = maybeAddArgsList(args, "--include", bo.Include)
@ -110,7 +98,6 @@ type RestoreOpts struct {
Verify bool `hcl:"Verify,optional"` Verify bool `hcl:"Verify,optional"`
} }
// ToArgs returns the structs arguments as a slice of strings.
func (ro RestoreOpts) ToArgs() (args []string) { func (ro RestoreOpts) ToArgs() (args []string) {
args = maybeAddArgsList(args, "--exclude", ro.Exclude) args = maybeAddArgsList(args, "--exclude", ro.Exclude)
args = maybeAddArgsList(args, "--include", ro.Include) args = maybeAddArgsList(args, "--include", ro.Include)
@ -150,7 +137,6 @@ type ForgetOpts struct {
Prune bool `hcl:"Prune,optional"` Prune bool `hcl:"Prune,optional"`
} }
// ToArgs returns the structs arguments as a slice of strings.
func (fo ForgetOpts) ToArgs() (args []string) { func (fo ForgetOpts) ToArgs() (args []string) {
args = maybeAddArgInt(args, "--keep-last", fo.KeepLast) args = maybeAddArgInt(args, "--keep-last", fo.KeepLast)
args = maybeAddArgInt(args, "--keep-hourly", fo.KeepHourly) args = maybeAddArgInt(args, "--keep-hourly", fo.KeepHourly)
@ -207,15 +193,13 @@ type ResticGlobalOpts struct {
TLSClientCertFile string `hcl:"TlsClientCertFile,optional"` TLSClientCertFile string `hcl:"TlsClientCertFile,optional"`
LimitDownload int `hcl:"LimitDownload,optional"` LimitDownload int `hcl:"LimitDownload,optional"`
LimitUpload int `hcl:"LimitUpload,optional"` LimitUpload int `hcl:"LimitUpload,optional"`
VerboseLevel int `hcl:"VerboseLevel,optional"`
Options map[string]string `hcl:"Options,optional"` Options map[string]string `hcl:"Options,optional"`
VerboseLevel int `hcl:"VerboseLevel,optional"`
CleanupCache bool `hcl:"CleanupCache,optional"` CleanupCache bool `hcl:"CleanupCache,optional"`
InsecureTLS bool `hcl:"InsecureTls,optional"`
NoCache bool `hcl:"NoCache,optional"` NoCache bool `hcl:"NoCache,optional"`
NoLock bool `hcl:"NoLock,optional"` NoLock bool `hcl:"NoLock,optional"`
} }
// ToArgs returns the structs arguments as a slice of strings.
func (glo ResticGlobalOpts) ToArgs() (args []string) { func (glo ResticGlobalOpts) ToArgs() (args []string) {
args = maybeAddArgString(args, "--cacert", glo.CaCertFile) args = maybeAddArgString(args, "--cacert", glo.CaCertFile)
args = maybeAddArgString(args, "--cache-dir", glo.CacheDir) args = maybeAddArgString(args, "--cache-dir", glo.CacheDir)
@ -225,7 +209,6 @@ func (glo ResticGlobalOpts) ToArgs() (args []string) {
args = maybeAddArgInt(args, "--limit-upload", glo.LimitUpload) args = maybeAddArgInt(args, "--limit-upload", glo.LimitUpload)
args = maybeAddArgInt(args, "--verbose", glo.VerboseLevel) args = maybeAddArgInt(args, "--verbose", glo.VerboseLevel)
args = maybeAddArgBool(args, "--cleanup-cache", glo.CleanupCache) args = maybeAddArgBool(args, "--cleanup-cache", glo.CleanupCache)
args = maybeAddArgBool(args, "--insecure-tls", glo.InsecureTLS)
args = maybeAddArgBool(args, "--no-cache", glo.NoCache) args = maybeAddArgBool(args, "--no-cache", glo.NoCache)
args = maybeAddArgBool(args, "--no-lock", glo.NoLock) args = maybeAddArgBool(args, "--no-lock", glo.NoLock)
@ -290,11 +273,7 @@ func (e *ResticError) Unwrap() error {
return e.OriginalError return e.OriginalError
} }
func (rcmd Restic) RunRestic( func (rcmd Restic) RunRestic(command string, options CommandOptions, commandArgs ...string) ([]string, error) {
command string,
options CommandOptions,
commandArgs ...string,
) (*CapturedCommandLogWriter, error) {
args := []string{} args := []string{}
if rcmd.GlobalOpts != nil { if rcmd.GlobalOpts != nil {
args = rcmd.GlobalOpts.ToArgs() args = rcmd.GlobalOpts.ToArgs()
@ -306,22 +285,22 @@ func (rcmd Restic) RunRestic(
cmd := exec.Command("restic", args...) cmd := exec.Command("restic", args...)
output := NewCapturedCommandLogWriter(rcmd.Logger) output := NewCapturedLogWriter(rcmd.Logger)
cmd.Stdout = output.Stdout cmd.Stdout = output
cmd.Stderr = output.Stderr cmd.Stderr = output
cmd.Env = rcmd.BuildEnv() cmd.Env = rcmd.BuildEnv()
cmd.Dir = rcmd.Cwd cmd.Dir = rcmd.Cwd
if err := cmd.Run(); err != nil { if err := cmd.Run(); err != nil {
responseErr := ErrRestic responseErr := ErrRestic
if lineIn("Is there a repository at the following location?", output.Stderr.Lines) { if lineIn("Is there a repository at the following location?", output.Lines) {
responseErr = ErrRepoNotFound responseErr = ErrRepoNotFound
} }
return output, NewResticError(command, output.AllLines(), errors.Join(err, responseErr)) return output.Lines, NewResticError(command, output.Lines, responseErr)
} }
return output, nil return output.Lines, nil
} }
func (rcmd Restic) Backup(files []string, opts BackupOpts) error { func (rcmd Restic) Backup(files []string, opts BackupOpts) error {
@ -348,12 +327,6 @@ func (rcmd Restic) Check() error {
return err return err
} }
func (rcmd Restic) Unlock(unlockOpts UnlockOpts) error {
_, err := rcmd.RunRestic("unlock", unlockOpts)
return err
}
type Snapshot struct { type Snapshot struct {
UID int `json:"uid"` UID int `json:"uid"`
GID int `json:"gid"` GID int `json:"gid"`
@ -368,20 +341,15 @@ type Snapshot struct {
} }
func (rcmd Restic) ReadSnapshots() ([]Snapshot, error) { func (rcmd Restic) ReadSnapshots() ([]Snapshot, error) {
output, err := rcmd.RunRestic("snapshots", GenericOpts{"--json"}) lines, err := rcmd.RunRestic("snapshots", GenericOpts{"--json"})
if err != nil { if err != nil {
return nil, err return nil, err
} }
if len(output.Stdout.Lines) == 0 {
return nil, fmt.Errorf("no snapshot output to parse: %w", ErrRestic)
}
singleLineOutput := strings.Join(output.Stdout.Lines, "")
snapshots := new([]Snapshot) snapshots := new([]Snapshot)
if err = json.Unmarshal([]byte(singleLineOutput), snapshots); err != nil {
return nil, fmt.Errorf("failed parsing snapshot results from %s: %w", singleLineOutput, err) if err = json.Unmarshal([]byte(lines[0]), snapshots); err != nil {
return nil, fmt.Errorf("failed parsing snapshot results from %s: %w", lines[0], err)
} }
return *snapshots, nil return *snapshots, nil


@ -32,7 +32,6 @@ func TestGlobalOptions(t *testing.T) {
LimitUpload: 1, LimitUpload: 1,
VerboseLevel: 1, VerboseLevel: 1,
CleanupCache: true, CleanupCache: true,
InsecureTLS: true,
NoCache: true, NoCache: true,
NoLock: true, NoLock: true,
Options: map[string]string{ Options: map[string]string{
@ -49,7 +48,6 @@ func TestGlobalOptions(t *testing.T) {
"--limit-upload", "1", "--limit-upload", "1",
"--verbose", "1", "--verbose", "1",
"--cleanup-cache", "--cleanup-cache",
"--insecure-tls",
"--no-cache", "--no-cache",
"--no-lock", "--no-lock",
"--option", "key='a long value'", "--option", "key='a long value'",
@ -152,20 +150,6 @@ func TestForgetOpts(t *testing.T) {
AssertEqual(t, "args didn't match", expected, args) AssertEqual(t, "args didn't match", expected, args)
} }
func TestUnlockOpts(t *testing.T) {
t.Parallel()
args := main.UnlockOpts{
RemoveAll: true,
}.ToArgs()
expected := []string{
"--remove-all",
}
AssertEqual(t, "args didn't match", expected, args)
}
func TestBuildEnv(t *testing.T) { func TestBuildEnv(t *testing.T) {
t.Parallel() t.Parallel()
@ -237,7 +221,7 @@ func TestResticInterface(t *testing.T) {
} }
// Write test file to the data dir // Write test file to the data dir
err := os.WriteFile(dataFile, []byte("testing"), 0o644) err := os.WriteFile(dataFile, []byte("testing"), 0644)
AssertEqualFail(t, "unexpected error writing to test file", nil, err) AssertEqualFail(t, "unexpected error writing to test file", nil, err)
// Make sure no existing repo is found // Make sure no existing repo is found
@ -297,7 +281,7 @@ func TestResticInterface(t *testing.T) {
AssertEqualFail(t, "unexpected error checking repo", nil, err) AssertEqualFail(t, "unexpected error checking repo", nil, err)
// Change the data file // Change the data file
err = os.WriteFile(dataFile, []byte("unexpected"), 0o644) err = os.WriteFile(dataFile, []byte("unexpected"), 0644)
AssertEqualFail(t, "unexpected error writing to test file", nil, err) AssertEqualFail(t, "unexpected error writing to test file", nil, err)
// Check that data wrote // Check that data wrote
@ -313,8 +297,4 @@ func TestResticInterface(t *testing.T) {
value, err = os.ReadFile(restoredDataFile) value, err = os.ReadFile(restoredDataFile)
AssertEqualFail(t, "unexpected error reading from test file", nil, err) AssertEqualFail(t, "unexpected error reading from test file", nil, err)
AssertEqualFail(t, "incorrect value in test file", "testing", string(value)) AssertEqualFail(t, "incorrect value in test file", "testing", string(value))
// Try to unlock the repo (repo shouldn't really be locked, but this should still run without error
err = restic.Unlock(main.UnlockOpts{}) //nolint:exhaustruct
AssertEqualFail(t, "unexpected error unlocking repo", nil, err)
} }


@ -13,10 +13,8 @@ import (
"github.com/robfig/cron/v3" "github.com/robfig/cron/v3"
) )
var ( var jobResultsLock = sync.Mutex{}
jobResultsLock = sync.Mutex{} var jobResults = map[string]JobResult{}
jobResults = map[string]JobResult{}
)
type JobResult struct { type JobResult struct {
JobName string JobName string
@ -69,12 +67,9 @@ func healthHandleFunc(writer http.ResponseWriter, request *http.Request) {
func RunHTTPHandlers(addr string) error { func RunHTTPHandlers(addr string) error {
http.HandleFunc("/health", healthHandleFunc) http.HandleFunc("/health", healthHandleFunc)
http.Handle("/metrics", promhttp.HandlerFor( http.Handle("/metrics", promhttp.Handler())
Metrics.Registry,
promhttp.HandlerOpts{Registry: Metrics.Registry}, //nolint:exhaustruct
))
return fmt.Errorf("error on http server: %w", http.ListenAndServe(addr, nil)) //#nosec: g114 return fmt.Errorf("error on healthcheck: %w", http.ListenAndServe(addr, nil)) //#nosec: g114
} }
func ScheduleAndRunJobs(jobs []Job) error { func ScheduleAndRunJobs(jobs []Job) error {
@ -115,8 +110,6 @@ func ScheduleAndRunJobs(jobs []Job) error {
defer func() { defer func() {
ctx := scheduler.Stop() ctx := scheduler.Stop()
<-ctx.Done() <-ctx.Done()
fmt.Println("All jobs successfully stopped")
}() }()
return nil return nil


@ -5,7 +5,6 @@ import (
"log" "log"
"os" "os"
"os/exec" "os/exec"
"sort"
"strings" "strings"
) )
@ -40,7 +39,6 @@ func NewCapturedLogWriter(logger *log.Logger) *CapturedLogWriter {
return &CapturedLogWriter{Lines: []string{}, logger: logger} return &CapturedLogWriter{Lines: []string{}, logger: logger}
} }
// Write writes the provided byte slice to the logger and stores each captured line.
func (w *CapturedLogWriter) Write(content []byte) (n int, err error) { func (w *CapturedLogWriter) Write(content []byte) (n int, err error) {
message := string(content) message := string(content)
for _, line := range strings.Split(message, "\n") { for _, line := range strings.Split(message, "\n") {
@ -51,33 +49,6 @@ func (w *CapturedLogWriter) Write(content []byte) (n int, err error) {
return len(content), nil return len(content), nil
} }
// LinesMergedWith returns a slice of lines from this logger merged with another.
func (w CapturedLogWriter) LinesMergedWith(other CapturedLogWriter) []string {
allLines := []string{}
allLines = append(allLines, w.Lines...)
allLines = append(allLines, other.Lines...)
sort.Strings(allLines)
return allLines
}
type CapturedCommandLogWriter struct {
Stdout *CapturedLogWriter
Stderr *CapturedLogWriter
}
func NewCapturedCommandLogWriter(logger *log.Logger) *CapturedCommandLogWriter {
return &CapturedCommandLogWriter{
Stdout: NewCapturedLogWriter(logger),
Stderr: NewCapturedLogWriter(logger),
}
}
func (cclw CapturedCommandLogWriter) AllLines() []string {
return cclw.Stdout.LinesMergedWith(*cclw.Stderr)
}
func RunShell(script string, cwd string, env map[string]string, logger *log.Logger) error { func RunShell(script string, cwd string, env map[string]string, logger *log.Logger) error {
cmd := exec.Command("sh", "-c", strings.TrimSpace(script)) //nolint:gosec cmd := exec.Command("sh", "-c", strings.TrimSpace(script)) //nolint:gosec

tasks.go: 182 changes

@ -67,7 +67,7 @@ func (t *JobTaskScript) SetName(name string) {
t.name = name t.name = name
} }
// JobTaskMySQL is a MySQL backup task that performs required pre and post tasks. // JobTaskMySQL is a sqlite backup task that performs required pre and post tasks.
type JobTaskMySQL struct { type JobTaskMySQL struct {
Port int `hcl:"port,optional"` Port int `hcl:"port,optional"`
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
@ -89,16 +89,11 @@ func (t JobTaskMySQL) Validate() error {
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField) return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
} }
if stat, err := os.Stat(t.DumpToPath); err != nil { if s, err := os.Stat(t.DumpToPath); err != nil {
if !errors.Is(err, fs.ErrNotExist) { if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf( return fmt.Errorf("task %s: invalid dump_to: could not stat path: %v: %w", t.Name, err, ErrInvalidConfigValue)
"task %s: invalid dump_to: could not stat path: %s: %w",
t.Name,
t.DumpToPath,
ErrInvalidConfigValue,
)
} }
} else if stat.Mode().IsDir() { } else if s.Mode().IsDir() {
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue) return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
} }
@ -160,20 +155,12 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
command = append(command, "--host", t.Hostname) command = append(command, "--host", t.Hostname)
} }
if t.Port != 0 {
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
}
if t.Username != "" { if t.Username != "" {
command = append(command, "--user", t.Username) command = append(command, "--user", t.Username)
} }
if t.Password != "" { if t.Password != "" {
command = append(command, fmt.Sprintf("--password=%s", t.Password)) command = append(command, "--password", t.Password)
}
if t.Database != "" {
command = append(command, t.Database)
} }
command = append(command, "<", t.DumpToPath) command = append(command, "<", t.DumpToPath)
@ -187,144 +174,6 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
} }
} }
// JobTaskPostgres is a postgres backup task that performs required pre and post tasks.
type JobTaskPostgres struct {
Port int `hcl:"port,optional"`
Name string `hcl:"name,label"`
Hostname string `hcl:"hostname,optional"`
Database string `hcl:"database,optional"`
Username string `hcl:"username,optional"`
Password string `hcl:"password,optional"`
Tables []string `hcl:"tables,optional"`
DumpToPath string `hcl:"dump_to"`
NoTablespaces bool `hcl:"no_tablespaces,optional"`
Clean bool `hcl:"clean,optional"`
Create bool `hcl:"create,optional"`
}
func (t JobTaskPostgres) Paths() []string {
return []string{t.DumpToPath}
}
func (t JobTaskPostgres) Validate() error {
if t.DumpToPath == "" {
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
}
if stat, err := os.Stat(t.DumpToPath); err != nil {
if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf(
"task %s: invalid dump_to: could not stat path: %s: %w",
t.Name,
t.DumpToPath,
ErrInvalidConfigValue,
)
}
} else if stat.Mode().IsDir() {
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
}
if len(t.Tables) > 0 && t.Database == "" {
return fmt.Errorf(
"task %s is invalid. Must specify a database to use tables: %w",
t.Name,
ErrMissingField,
)
}
return nil
}
//nolint:cyclop
func (t JobTaskPostgres) GetPreTask() ExecutableTask {
command := []string{"pg_dump"}
if t.Database == "" {
command = []string{"pg_dumpall"}
}
command = append(command, "--file", t.DumpToPath)
if t.Hostname != "" {
command = append(command, "--host", t.Hostname)
}
if t.Port != 0 {
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
}
if t.Username != "" {
command = append(command, "--username", t.Username)
}
if t.NoTablespaces {
command = append(command, "--no-tablespaces")
}
if t.Clean {
command = append(command, "--clean")
}
if t.Create {
command = append(command, "--create")
}
for _, table := range t.Tables {
command = append(command, "--table", table)
}
if t.Database != "" {
command = append(command, t.Database)
}
env := map[string]string{}
if t.Password != "" {
env["PGPASSWORD"] = t.Password
}
return JobTaskScript{
name: t.Name,
env: env,
Cwd: ".",
OnBackup: strings.Join(command, " "),
OnRestore: "",
}
}
func (t JobTaskPostgres) GetPostTask() ExecutableTask {
command := []string{"psql"}
if t.Hostname != "" {
command = append(command, "--host", t.Hostname)
}
if t.Port != 0 {
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
}
if t.Username != "" {
command = append(command, "--username", t.Username)
}
if t.Database != "" {
command = append(command, t.Database)
}
command = append(command, "<", t.DumpToPath)
env := map[string]string{}
if t.Password != "" {
env["PGPASSWORD"] = t.Password
}
return JobTaskScript{
name: t.Name,
env: env,
Cwd: ".",
OnBackup: "",
OnRestore: strings.Join(command, " "),
}
}
// JobTaskSqlite is a sqlite backup task that performs required pre and post tasks. // JobTaskSqlite is a sqlite backup task that performs required pre and post tasks.
type JobTaskSqlite struct { type JobTaskSqlite struct {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
@ -341,16 +190,11 @@ func (t JobTaskSqlite) Validate() error {
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField) return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
} }
if stat, err := os.Stat(t.DumpToPath); err != nil { if s, err := os.Stat(t.DumpToPath); err != nil {
if !errors.Is(err, fs.ErrNotExist) { if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf( return fmt.Errorf("task %s: invalid dump_to: could not stat path: %v: %w", t.Name, err, ErrInvalidConfigValue)
"task %s: invalid dump_to: could not stat path: %s: %w",
t.Name,
t.DumpToPath,
ErrInvalidConfigValue,
)
} }
} else if stat.Mode().IsDir() { } else if s.Mode().IsDir() {
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue) return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
} }
@ -382,7 +226,6 @@ type BackupFilesTask struct {
BackupOpts *BackupOpts `hcl:"backup_opts,block"` BackupOpts *BackupOpts `hcl:"backup_opts,block"`
RestoreOpts *RestoreOpts `hcl:"restore_opts,block"` RestoreOpts *RestoreOpts `hcl:"restore_opts,block"`
name string name string
snapshot string
} }
func (t BackupFilesTask) RunBackup(cfg TaskConfig) error { func (t BackupFilesTask) RunBackup(cfg TaskConfig) error {
@ -405,11 +248,8 @@ func (t BackupFilesTask) RunRestore(cfg TaskConfig) error {
t.RestoreOpts = &RestoreOpts{} //nolint:exhaustruct t.RestoreOpts = &RestoreOpts{} //nolint:exhaustruct
} }
if t.snapshot == "" { // TODO: Make the snapshot configurable
t.snapshot = "latest" if err := cfg.Restic.Restore("latest", *t.RestoreOpts); err != nil {
}
if err := cfg.Restic.Restore(t.snapshot, *t.RestoreOpts); err != nil {
err = fmt.Errorf("failed restoring paths: %w", err) err = fmt.Errorf("failed restoring paths: %w", err)
cfg.Logger.Print(err) cfg.Logger.Print(err)
@ -441,12 +281,10 @@ type JobTask struct {
PreScripts []JobTaskScript `hcl:"pre_script,block"` PreScripts []JobTaskScript `hcl:"pre_script,block"`
PostScripts []JobTaskScript `hcl:"post_script,block"` PostScripts []JobTaskScript `hcl:"post_script,block"`
MySQL []JobTaskMySQL `hcl:"mysql,block"` MySQL []JobTaskMySQL `hcl:"mysql,block"`
Postgres []JobTaskPostgres `hcl:"postgres,block"`
Sqlite []JobTaskSqlite `hcl:"sqlite,block"` Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
} }
func (t JobTask) Validate() error { func (t JobTask) Validate() error {
// NOTE: Might make task types mutually exclusive because order is confusing even if deterministic
if t.Name == "" { if t.Name == "" {
return fmt.Errorf("task is missing a name: %w", ErrMissingField) return fmt.Errorf("task is missing a name: %w", ErrMissingField)
} }


@ -163,29 +163,7 @@ func TestJobTaskSql(t *testing.T) {
" --user user --password=pass --no-tablespaces db table1 table2", " --user user --password=pass --no-tablespaces db table1 table2",
postBackup: "", postBackup: "",
preRestore: "", preRestore: "",
postRestore: "mysql --host host --port 3306 --user user --password=pass db < ./simple.sql", postRestore: "mysql --host host --user user --password pass < ./simple.sql",
},
{
name: "psql all",
task: main.JobTaskPostgres{
Name: "simple",
Hostname: "host",
Port: 6543,
Username: "user",
Password: "pass",
Database: "db",
NoTablespaces: true,
Create: true,
Clean: true,
Tables: []string{"table1", "table2"},
DumpToPath: "./simple.sql",
},
validationErr: nil,
preBackup: "pg_dump --file ./simple.sql --host host --port 6543 --username user --no-tablespaces" +
" --clean --create --table table1 --table table2 db",
postBackup: "",
preRestore: "",
postRestore: "psql --host host --port 6543 --username user db < ./simple.sql",
}, },
// Sqlite // Sqlite
{ {


@ -2,16 +2,6 @@ package main
import "fmt" import "fmt"
func lineIn(needle string, haystack []string) bool {
for _, line := range haystack {
if line == needle {
return true
}
}
return false
}
func MergeEnvMap(parent, child map[string]string) map[string]string { func MergeEnvMap(parent, child map[string]string) map[string]string {
result := map[string]string{} result := map[string]string{}