Compare commits
2 Commits: main...push-gatew

Author | SHA1 | Date
--- | --- | ---
 | ecf375511c |
 | e2c71d8e25 |
@@ -4,7 +4,7 @@ name: test

steps:
  - name: test
    image: golang:1.21
    image: golang:1.17
    environment:
      VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
    commands:
@@ -13,7 +13,7 @@ steps:
      - make test

  - name: check
    image: iamthefij/drone-pre-commit@sha256:30fa17489b86d7a4c3ad9c3ce2e152c25d82b8671e5609d322c6cae0baed89cd
    image: iamthefij/drone-pre-commit:personal

---
kind: pipeline
@@ -32,7 +32,7 @@ trigger:

steps:
  - name: build all binaries
    image: golang:1.21
    image: golang:1.17
    environment:
      VERSION: ${DRONE_TAG:-${DRONE_COMMIT}}
    commands:
@@ -65,7 +65,7 @@ steps:
  - name: push images
    image: thegeeklab/drone-docker-buildx
    settings:
      repo: iamthefij/restic-scheduler
      repo: iamthefij/resticscheduler
      auto_tag: true
      platforms:
        - linux/amd64

.gitignore (vendored): 5 lines changed
@@ -20,9 +20,4 @@ dist/

# Built executable
restic-scheduler
resticscheduler
data/

# Itest temp dirs
itest/data
itest/repo
@@ -16,6 +16,7 @@ linters:
- contextcheck
- cyclop
- decorder
- depguard
- dupl
- durationcheck
- errchkjson
@@ -31,8 +32,10 @@ linters:
- gocognit
- goconst
- gocritic
# - gocyclo # Using cyclop
- godot
- gofumpt
# - goerr113 # Using errorlint
- gofmt
- goheader
- goimports
- gomnd
@@ -42,6 +45,7 @@ linters:
- gosec
- grouper
- importas
# - ireturn
- lll
- maintidx
- makezero
@@ -56,9 +60,11 @@ linters:
- paralleltest
- prealloc
- predeclared
# - promlinter # Not common enough
- revive
- rowserrcheck
- sqlclosecheck
# - stylecheck # Using revive
- tagliatelle
- tenv
- testpackage
@@ -66,11 +72,27 @@ linters:
- tparallel
- unconvert
- unparam
- varnamelen
- wastedassign
- whitespace
- wrapcheck
- wsl

disable:
- gochecknoglobals
- godox
- forbidigo
# Deprecated
- golint
- interfacer
- maligned
- scopelint
- ifshort
- varcheck
- structcheck
- deadcode
- exhaustivestruct

linters-settings:
gomnd:
settings:
@@ -1,7 +1,7 @@
---
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    rev: v3.4.0
    hooks:
      - id: check-added-large-files
      - id: check-yaml
@@ -11,8 +11,10 @@ repos:
      - id: end-of-file-fixer
      - id: check-merge-conflict
  - repo: https://github.com/dnephin/pre-commit-golang
    rev: v0.5.1
    rev: v0.4.0
    hooks:
      - id: go-fmt
      - id: go-imports
      - id: golangci-lint
        args:
          - --timeout=3m

Dockerfile: 23 lines changed
@@ -1,23 +1,18 @@
FROM alpine:3.18
FROM alpine:3.16

RUN apk add --no-cache \
    bash~=5 \
    consul~=1 \
    mariadb-client~=10 \
    mariadb-connector-c~=3 \
    nomad~=1 \
    postgresql15-client~=15 \
    rclone~=1.62 \
    redis~=7 \
    restic~=0.15 \
    consul~=1.12 \
    mariadb-client~=10.6 \
    mariadb-connector-c~=3.1 \
    rclone~=1.58 \
    redis~=7.0 \
    restic~=0.13 \
    sqlite~=3 \
    tzdata~=2024 \
    ;

ARG TARGETOS
ARG TARGETARCH
COPY ./dist/restic-scheduler-$TARGETOS-$TARGETARCH /bin/restic-scheduler
COPY ./dist/resticscheduler-$TARGETOS-$TARGETARCH /bin/resticscheduler

HEALTHCHECK CMD ["wget", "-O", "-", "http://localhost:8080/health"]

ENTRYPOINT [ "/bin/restic-scheduler" ]
ENTRYPOINT [ "/bin/resticscheduler" ]

Makefile: 15 lines changed
@@ -1,10 +1,11 @@
APP_NAME = restic-scheduler
APP_NAME = resticscheduler
VERSION ?= $(shell git describe --tags --dirty)
GOFILES = *.go
# Multi-arch targets are generated from this
TARGET_ALIAS = $(APP_NAME)-linux-amd64 $(APP_NAME)-linux-arm $(APP_NAME)-linux-arm64
TARGET_ALIAS = $(APP_NAME)-linux-amd64 $(APP_NAME)-linux-arm $(APP_NAME)-linux-arm64 $(APP_NAME)-darwin-amd64 $(APP_NAME)-darwin-arm64
TARGETS = $(addprefix dist/,$(TARGET_ALIAS))
CURRENT_GOARCH = $(shell go env GOARCH)
.QUOTE = "
CURRENT_GOARCH = $(shell go env | awk -F "=" '/GOARCH/ { gsub(/$(.QUOTE)/,"", $$2); print $$2}')

# Default make target will run tests
.DEFAULT_GOAL = test
@@ -28,13 +29,9 @@ build: $(APP_NAME)
# Run all tests
.PHONY: test
test:
    go test -v -coverprofile=coverage.out # -short
    go test -coverprofile=coverage.out # -short
    go tool cover -func=coverage.out

.PHONY: itest
itest: docker-build
    ./itest/run.sh

# Installs pre-commit hooks
.PHONY: install-hooks
install-hooks:
@@ -54,7 +51,7 @@ clean:
## Multi-arch targets
$(TARGETS): $(GOFILES)
    mkdir -p ./dist
    GOOS=$(word 3, $(subst -, ,$(@))) GOARCH=$(word 4, $(subst -, ,$(@))) CGO_ENABLED=0 \
    GOOS=$(word 2, $(subst -, ,$(@))) GOARCH=$(word 3, $(subst -, ,$(@))) CGO_ENABLED=0 \
        go build -ldflags '-X "main.version=$(VERSION)"' -a -installsuffix nocgo \
        -o $@
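The two GOOS/GOARCH recipe lines above differ only because APP_NAME gains or loses a hyphen, which shifts the word positions when the target name is split on `-`. The other moving part is `-ldflags '-X "main.version=$(VERSION)"'`, which overwrites a package-level string at link time. A minimal, hypothetical Go sketch of the receiving side; the variable name `version` matches the `main.version` symbol in the ldflags above, while the flag wiring is only illustrative:

```go
// Illustrative only: mirrors the -ldflags '-X "main.version=..."' pattern
// used in the Makefile above. The target must be a plain string variable in
// package main for the linker to be able to overwrite it.
package main

import (
	"flag"
	"fmt"
)

// Overwritten at build time via: go build -ldflags '-X "main.version=v1.2.3"'
var version = "dev"

func main() {
	showVersion := flag.Bool("version", false, "Display the version and exit")
	flag.Parse()

	if *showVersion {
		// Without the ldflags override this prints the fallback value "dev".
		fmt.Println("restic-scheduler version:", version)
		return
	}
}
```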

README.md: 210 lines changed
@ -1,211 +1,3 @@
|
||||
# [restic-scheduler](/iamthefij/restic-scheduler)
|
||||
|
||||
## About
|
||||
|
||||
`restic-scheduler` is a tool designed to allow declarative scheduling of restic backups using HCL (HashiCorp Configuration Language). This tool simplifies the process of managing and automating backups by defining jobs in a configuration file.
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Installation
|
||||
|
||||
You can install `restic-scheduler` using the following command:
|
||||
|
||||
```sh
|
||||
go install git.iamthefij.com/iamthefij/restic-scheduler@latest
|
||||
```
|
||||
|
||||
You can also download the latest release from the [releases page](https://git.iamthefij.com/iamthefij/restic-scheduler/releases).
|
||||
|
||||
Finally, if you prefer to use Docker, you can run something like the following command:
|
||||
|
||||
```sh
|
||||
docker run -v /path/to/config:/config -v /path/to/data:/data iamthefij/restic-scheduler -config /config/jobs.hcl
|
||||
```
|
||||
|
||||
### Prerequisites
|
||||
|
||||
If you're not using Docker, you'll need to ensure that `restic` is installed and available in your system's PATH. You can download and install restic from [here](https://restic.net/).
|
||||
|
||||
## Usage
|
||||
|
||||
### Command Line Interface
|
||||
|
||||
The `restic-scheduler` command line interface provides several options for managing backup, restore, and unlock jobs. Below are some examples of how to use this tool.
|
||||
|
||||
#### Display Version
|
||||
|
||||
To display the version of `restic-scheduler`, use the `-version` flag:
|
||||
|
||||
```sh
|
||||
restic-scheduler -version
|
||||
```
|
||||
|
||||
#### Run Backup Jobs
|
||||
|
||||
To run backup jobs, use the `-backup` flag followed by a comma-separated list of job names. Use `all` to run all backup jobs:
|
||||
|
||||
```sh
|
||||
restic-scheduler -backup job1,job2
|
||||
```
|
||||
|
||||
#### Run Restore Jobs
|
||||
|
||||
To run restore jobs, use the `-restore` flag followed by a comma-separated list of job names. Use `all` to run all restore jobs:
|
||||
|
||||
```sh
|
||||
restic-scheduler -restore job1,job2
|
||||
```
|
||||
|
||||
#### Unlock Job Repositories
|
||||
|
||||
To unlock job repositories, use the `-unlock` flag followed by a comma-separated list of job names. Use `all` to unlock all job repositories:
|
||||
|
||||
```sh
|
||||
restic-scheduler -unlock job1,job2
|
||||
```
|
||||
|
||||
#### Run Jobs Once and Exit
|
||||
|
||||
To run specified backup and restore jobs once and exit, use the `-once` flag:
|
||||
|
||||
```sh
|
||||
restic-scheduler -backup job1 -restore job2 -once
|
||||
```
|
||||
|
||||
#### Health Check and metrics API
|
||||
|
||||
To bind the health check and Prometheus metrics API to a specific address, use the `-addr` flag:
|
||||
|
||||
```sh
|
||||
restic-scheduler -addr 0.0.0.0:8080
|
||||
```
|
||||
|
||||
#### Metrics Push Gateway
|
||||
|
||||
To specify the URL of a Prometheus push gateway service for batch runs, use the `-push-gateway` flag:
|
||||
|
||||
```sh
|
||||
restic-scheduler -push-gateway http://example.com
|
||||
```
|
||||
|
||||
## HCL Configuration
|
||||
|
||||
The configuration for `restic-scheduler` is defined using HCL. Below is a description and example of how to define a backup job in the configuration file.
|
||||
|
||||
### Job Configuration
|
||||
|
||||
A job in the configuration file is defined using the `job` block. Each job must have a unique name, a schedule, and a configuration for restic. Additionally, tasks can be defined to perform specific actions before and after the backup.
|
||||
|
||||
#### Fields
|
||||
|
||||
- `name`: The name of the job.
|
||||
- `schedule`: The cron schedule for the job.
|
||||
- `config`: The restic configuration block.
|
||||
- `repo`: The restic repository.
|
||||
- `passphrase`: (Optional) The passphrase for the repository.
|
||||
- `env`: (Optional) Environment variables for restic.
|
||||
- `options`: (Optional) Global options for restic. See the `restic` command for details.
|
||||
- `task`: (Optional) A list of tasks to run before and after the backup.
|
||||
- `mysql`, `postgres`, `sqlite`: (Optional) Database-specific tasks.
|
||||
- `backup`: The backup configuration block.
|
||||
- `forget`: (Optional) Options for forgetting old snapshots.
|
||||
|
||||
### Example
|
||||
|
||||
Below is an example of a job configuration in HCL:
|
||||
|
||||
```hcl
|
||||
// Example job file
|
||||
job "MyApp" {
|
||||
schedule = "* * * * *"
|
||||
|
||||
config {
|
||||
repo = "s3://..."
|
||||
passphrase = "foo"
|
||||
# Some alternate ways to pass the passphrase to restic
|
||||
# passphrase = env("RESTIC_PASSWORD")
|
||||
# passphrase = readfile("/path/to/passphrase")
|
||||
env = {
|
||||
"foo" = "bar",
|
||||
}
|
||||
options {
|
||||
VerboseLevel = 3
|
||||
# Another alternate way to pass the passphrase to restic
|
||||
# PasswordFile = "/path/to/passphrase"
|
||||
}
|
||||
}
|
||||
|
||||
mysql "DumpMainDB" {
|
||||
hostname = "foo"
|
||||
username = "bar"
|
||||
dump_to = "/data/main.sql"
|
||||
}
|
||||
|
||||
sqlite "DumpSqlite" {
|
||||
path = "/db/sqlite.db"
|
||||
dump_to = "/data/sqlite.db.bak"
|
||||
}
|
||||
|
||||
task "Create biz file" {
|
||||
|
||||
pre_script {
|
||||
on_backup = <<EOF
|
||||
echo bar >> /biz.txt
|
||||
EOF
|
||||
}
|
||||
|
||||
post_script {
|
||||
on_backup = <<EOF
|
||||
rm /biz.txt
|
||||
EOF
|
||||
}
|
||||
}
|
||||
|
||||
task "Run restore shell script" {
|
||||
pre_script {
|
||||
on_restore = "/foo/bar.sh"
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
files =[
|
||||
"/data",
|
||||
"/biz.txt",
|
||||
]
|
||||
|
||||
backup_opts {
|
||||
Tags = ["service"]
|
||||
}
|
||||
|
||||
restore_opts {
|
||||
Verify = true
|
||||
# Since paths are absolute, restore to root
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 3
|
||||
KeepWeekly = 2
|
||||
KeepMonthly = 2
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```sh
|
||||
restic-scheduler jobs.hcl
|
||||
```
|
||||
|
||||
This will read the job definitions from `jobs.hcl` and execute the specified jobs.
|
||||
|
||||
For more examples, check out `./config.hcl` or some of the example integration test configs in `./test/`.
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome! Please open an issue or submit a pull request on the [GitHub repository](https://git.iamthefij.com/iamthefij/restic-scheduler).
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
|
||||
Job scheduler for Restic backups
|
||||
|

go.mod: 2 lines changed
@@ -1,6 +1,6 @@
module git.iamthefij.com/iamthefij/restic-scheduler

go 1.20
go 1.17

require (
    github.com/go-test/deep v1.0.8
@ -1,46 +0,0 @@
|
||||
#! /bin/sh
|
||||
set -ex
|
||||
|
||||
# Create flat file
|
||||
echo "Hello" > /data/test.txt
|
||||
|
||||
# Create Sqlite database
|
||||
touch /data/test_database.db
|
||||
sqlite3 /data/test_database.db <<-EOF
|
||||
CREATE TABLE test_table (
|
||||
id INTEGER PRIMARY KEY,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO test_table(data)
|
||||
VALUES ("Test row");
|
||||
EOF
|
||||
|
||||
# Create MySql database
|
||||
until mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" --execute "SHOW DATABASES;"; do
|
||||
sleep 1
|
||||
done
|
||||
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF
|
||||
CREATE TABLE test_table (
|
||||
id INTEGER AUTO_INCREMENT PRIMARY KEY,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO test_table(data)
|
||||
VALUES ("Test row");
|
||||
EOF
|
||||
|
||||
# Create Postgres database
|
||||
export PGPASSWORD="$PGSQL_PASS"
|
||||
until psql --host "$PGSQL_HOST" --username "$PGSQL_USER" --command "SELECT datname FROM pg_database;"; do
|
||||
sleep 1
|
||||
done
|
||||
psql -v ON_ERROR_STOP=1 --host "$PGSQL_HOST" --username "$PGSQL_USER" main <<EOF
|
||||
CREATE TABLE test_table (
|
||||
id SERIAL PRIMARY KEY,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO test_table(data)
|
||||
VALUES ('Test row');
|
||||
EOF
|
@ -1,57 +0,0 @@
|
||||
---
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
mysql:
|
||||
image: mysql
|
||||
environment:
|
||||
MYSQL_ROOT_PASSWORD: shhh
|
||||
MYSQL_DATABASE: main
|
||||
|
||||
postgres:
|
||||
image: postgres
|
||||
environment:
|
||||
POSTGRES_PASSWORD: shhh
|
||||
POSTGRES_DB: main
|
||||
|
||||
bootstrap:
|
||||
image: restic-scheduler
|
||||
entrypoint: /bootstrap-tests.sh
|
||||
environment:
|
||||
MYSQL_HOST: mysql
|
||||
MYSQL_USER: root
|
||||
MYSQL_PWD: shhh
|
||||
PGSQL_HOST: postgres
|
||||
PGSQL_USER: postgres
|
||||
PGSQL_PASS: shhh
|
||||
volumes:
|
||||
- ./bootstrap-tests.sh:/bootstrap-tests.sh
|
||||
- ./data:/data
|
||||
|
||||
main:
|
||||
image: restic-scheduler
|
||||
environment:
|
||||
MYSQL_HOST: mysql
|
||||
MYSQL_USER: root
|
||||
MYSQL_PWD: shhh
|
||||
PGSQL_HOST: postgres
|
||||
PGSQL_USER: postgres
|
||||
PGSQL_PASS: shhh
|
||||
volumes:
|
||||
- ./repo:/repo
|
||||
- ./data:/data
|
||||
- ./test-backup.hcl:/test-backup.hcl
|
||||
|
||||
validate:
|
||||
image: restic-scheduler
|
||||
entrypoint: /validate-tests.sh
|
||||
environment:
|
||||
MYSQL_HOST: mysql
|
||||
MYSQL_USER: root
|
||||
MYSQL_PWD: shhh
|
||||
PGSQL_HOST: postgres
|
||||
PGSQL_USER: postgres
|
||||
PGSQL_PASS: shhh
|
||||
volumes:
|
||||
- ./validate-tests.sh:/validate-tests.sh
|
||||
- ./data:/data
|

itest/run.sh: 35 lines changed
@ -1,35 +0,0 @@
|
||||
#! /bin/bash
|
||||
set -ex
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
mkdir -p ./repo ./data
|
||||
|
||||
echo Clean everything
|
||||
docker-compose down -v
|
||||
rm -fr ./repo/* ./data/*
|
||||
sleep 5
|
||||
|
||||
echo Boostrap databases and data
|
||||
docker-compose up -d mysql postgres
|
||||
docker-compose run bootstrap
|
||||
sleep 1
|
||||
|
||||
echo Run backup job
|
||||
docker-compose run main -backup IntegrationTest -once /test-backup.hcl
|
||||
|
||||
echo Clean data
|
||||
docker-compose down -v
|
||||
docker-compose up -d mysql postgres
|
||||
rm -fr ./data/*
|
||||
sleep 15
|
||||
|
||||
echo Run restore
|
||||
docker-compose run main -restore IntegrationTest -once /test-backup.hcl
|
||||
sleep 1
|
||||
|
||||
echo Validate data
|
||||
docker-compose run validate
|
||||
|
||||
echo Clean all again
|
||||
docker-compose down -v
|
||||
rm -fr ./repo/* ./data/*
|
@ -1,38 +0,0 @@
|
||||
job "IntegrationTest" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "/repo"
|
||||
passphrase = "shh"
|
||||
}
|
||||
|
||||
mysql "MySQL" {
|
||||
hostname = env("MYSQL_HOST")
|
||||
database = "main"
|
||||
username = env("MYSQL_USER")
|
||||
password = env("MYSQL_PWD")
|
||||
dump_to = "/tmp/mysql.sql"
|
||||
}
|
||||
|
||||
postgres "Postgres" {
|
||||
hostname = env("PGSQL_HOST")
|
||||
database = "main"
|
||||
username = env("PGSQL_USER")
|
||||
password = env("PGSQL_PASS")
|
||||
create = true
|
||||
dump_to = "/tmp/psql.sql"
|
||||
}
|
||||
|
||||
sqlite "SQLite" {
|
||||
path = "/data/test_database.db"
|
||||
dump_to = "/data/test_database.db.bak"
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = ["/data"]
|
||||
|
||||
restore_opts {
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
#! /bin/sh
|
||||
set -ex
|
||||
|
||||
# Check flat file
|
||||
test -f /data/test.txt
|
||||
grep "^Hello" /data/test.txt
|
||||
|
||||
# Check Sqlite database
|
||||
test -f /data/test_database.db
|
||||
sqlite3 /data/test_database.db "select data from test_table where id = 1" | grep "^Test row"
|
||||
|
||||
# Check MySql database
|
||||
mysql --host "$MYSQL_HOST" --user "$MYSQL_USER" --password="$MYSQL_PWD" main <<EOF | grep "^Test row"
|
||||
select data from test_table where id = 1;
|
||||
EOF
|
||||
|
||||
# Check Postgres database
|
||||
export PGPASSWORD="$PGSQL_PASS"
|
||||
psql --host "$PGSQL_HOST" --user "$PGSQL_USER" main <<EOF | grep "Test row"
|
||||
select data from test_table where id = 1;
|
||||
EOF
|

job.go: 69 lines changed
@ -52,16 +52,13 @@ func (r ResticConfig) Validate() error {
|
||||
type Job struct {
|
||||
Name string `hcl:"name,label"`
|
||||
Schedule string `hcl:"schedule"`
|
||||
Config *ResticConfig `hcl:"config,block"`
|
||||
Config ResticConfig `hcl:"config,block"`
|
||||
Tasks []JobTask `hcl:"task,block"`
|
||||
Backup BackupFilesTask `hcl:"backup,block"`
|
||||
Forget *ForgetOpts `hcl:"forget,block"`
|
||||
|
||||
// Meta Tasks
|
||||
// NOTE: Now that these are also available within a task
|
||||
// these could be removed to make task order more obvious
|
||||
MySQL []JobTaskMySQL `hcl:"mysql,block"`
|
||||
Postgres []JobTaskPostgres `hcl:"postgres,block"`
|
||||
Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
|
||||
|
||||
// Metrics and health
|
||||
@ -76,24 +73,6 @@ func (j Job) validateTasks() error {
|
||||
}
|
||||
}
|
||||
|
||||
for _, mysql := range j.MySQL {
|
||||
if err := mysql.Validate(); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pg := range j.Postgres {
|
||||
if err := pg.Validate(); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, sqlite := range j.Sqlite {
|
||||
if err := sqlite.Validate(); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -103,11 +82,7 @@ func (j Job) Validate() error {
|
||||
}
|
||||
|
||||
if _, err := cron.ParseStandard(j.Schedule); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid schedule: %w: %w", j.Name, err, ErrInvalidConfigValue)
|
||||
}
|
||||
|
||||
if j.Config == nil {
|
||||
return fmt.Errorf("job %s is missing restic config: %w", j.Name, ErrMissingField)
|
||||
return fmt.Errorf("job %s has an invalid schedule: %v: %w", j.Name, err, ErrInvalidConfigValue)
|
||||
}
|
||||
|
||||
if err := j.Config.Validate(); err != nil {
|
||||
@ -118,6 +93,18 @@ func (j Job) Validate() error {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, mysql := range j.MySQL {
|
||||
if err := mysql.Validate(); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, sqlite := range j.Sqlite {
|
||||
if err := sqlite.Validate(); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid task: %w", j.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := j.Backup.Validate(); err != nil {
|
||||
return fmt.Errorf("job %s has an invalid backup config: %w", j.Name, err)
|
||||
}
|
||||
@ -133,10 +120,6 @@ func (j Job) AllTasks() []ExecutableTask {
|
||||
allTasks = append(allTasks, mysql.GetPreTask())
|
||||
}
|
||||
|
||||
for _, pg := range j.Postgres {
|
||||
allTasks = append(allTasks, pg.GetPreTask())
|
||||
}
|
||||
|
||||
for _, sqlite := range j.Sqlite {
|
||||
allTasks = append(allTasks, sqlite.GetPreTask())
|
||||
}
|
||||
@ -157,10 +140,6 @@ func (j Job) AllTasks() []ExecutableTask {
|
||||
allTasks = append(allTasks, mysql.GetPostTask())
|
||||
}
|
||||
|
||||
for _, pg := range j.Postgres {
|
||||
allTasks = append(allTasks, pg.GetPostTask())
|
||||
}
|
||||
|
||||
for _, sqlite := range j.Sqlite {
|
||||
allTasks = append(allTasks, sqlite.GetPostTask())
|
||||
}
|
||||
@ -175,10 +154,6 @@ func (j Job) BackupPaths() []string {
|
||||
paths = append(paths, t.DumpToPath)
|
||||
}
|
||||
|
||||
for _, t := range j.Postgres {
|
||||
paths = append(paths, t.DumpToPath)
|
||||
}
|
||||
|
||||
for _, t := range j.Sqlite {
|
||||
paths = append(paths, t.DumpToPath)
|
||||
}
|
||||
@ -222,7 +197,7 @@ func (j Job) Logger() *log.Logger {
|
||||
return GetLogger(j.Name)
|
||||
}
|
||||
|
||||
func (j Job) RunRestore(snapshot string) error {
|
||||
func (j Job) RunRestore() error {
|
||||
logger := j.Logger()
|
||||
restic := j.NewRestic()
|
||||
|
||||
@ -238,10 +213,6 @@ func (j Job) RunRestore(snapshot string) error {
|
||||
Env: nil,
|
||||
}
|
||||
|
||||
if backupTask, ok := exTask.(BackupFilesTask); ok {
|
||||
backupTask.snapshot = snapshot
|
||||
}
|
||||
|
||||
if err := exTask.RunRestore(taskCfg); err != nil {
|
||||
return fmt.Errorf("failed running job %s: %w", j.Name, err)
|
||||
}
|
||||
@ -280,11 +251,9 @@ func (j Job) Run() {
|
||||
result.LastError = err
|
||||
} else {
|
||||
Metrics.SnapshotCurrentCount.WithLabelValues(j.Name).Set(float64(len(snapshots)))
|
||||
if len(snapshots) > 0 {
|
||||
latestSnapshot := snapshots[len(snapshots)-1]
|
||||
Metrics.SnapshotLatestTime.WithLabelValues(j.Name).Set(float64(latestSnapshot.Time.Unix()))
|
||||
}
|
||||
}
|
||||
|
||||
if result.Success {
|
||||
Metrics.JobFailureCount.WithLabelValues(j.Name).Set(0.0)
|
||||
@ -307,7 +276,7 @@ func (j Job) NewRestic() *Restic {
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
DefaultConfig *ResticConfig `hcl:"default_config,block"`
|
||||
// GlobalConfig *ResticConfig `hcl:"global_config,block"`
|
||||
Jobs []Job `hcl:"job,block"`
|
||||
}
|
||||
|
||||
@ -317,12 +286,6 @@ func (c Config) Validate() error {
|
||||
}
|
||||
|
||||
for _, job := range c.Jobs {
|
||||
// Use default restic config if no job config is provided
|
||||
// TODO: Maybe merge values here
|
||||
if job.Config == nil {
|
||||
job.Config = c.DefaultConfig
|
||||
}
|
||||
|
||||
if err := job.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
|

job_test.go: 83 lines changed
@ -7,8 +7,8 @@ import (
|
||||
main "git.iamthefij.com/iamthefij/restic-scheduler"
|
||||
)
|
||||
|
||||
func ValidResticConfig() *main.ResticConfig {
|
||||
return &main.ResticConfig{
|
||||
func ValidResticConfig() main.ResticConfig {
|
||||
return main.ResticConfig{
|
||||
Passphrase: "shh",
|
||||
Repo: "./data",
|
||||
Env: nil,
|
||||
@ -92,7 +92,6 @@ func TestJobValidation(t *testing.T) {
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
},
|
||||
expectedErr: nil,
|
||||
@ -107,7 +106,6 @@ func TestJobValidation(t *testing.T) {
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
},
|
||||
expectedErr: main.ErrMissingField,
|
||||
@ -122,7 +120,6 @@ func TestJobValidation(t *testing.T) {
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
},
|
||||
expectedErr: main.ErrInvalidConfigValue,
|
||||
@ -132,12 +129,11 @@ func TestJobValidation(t *testing.T) {
|
||||
job: main.Job{
|
||||
Name: "Test job",
|
||||
Schedule: "@daily",
|
||||
Config: &main.ResticConfig{}, //nolint:exhaustruct
|
||||
Config: main.ResticConfig{}, //nolint:exhaustruct
|
||||
Tasks: []main.JobTask{},
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
},
|
||||
expectedErr: main.ErrMutuallyExclusive,
|
||||
@ -148,13 +144,10 @@ func TestJobValidation(t *testing.T) {
|
||||
Name: "Test job",
|
||||
Schedule: "@daily",
|
||||
Config: ValidResticConfig(),
|
||||
Tasks: []main.JobTask{
|
||||
{}, //nolint:exhaustruct
|
||||
},
|
||||
Tasks: []main.JobTask{{}},
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
},
|
||||
expectedErr: main.ErrMissingField,
|
||||
@ -168,10 +161,7 @@ func TestJobValidation(t *testing.T) {
|
||||
Tasks: []main.JobTask{},
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{
|
||||
{}, //nolint:exhaustruct
|
||||
},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
MySQL: []main.JobTaskMySQL{{}},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
},
|
||||
expectedErr: main.ErrMissingField,
|
||||
@ -186,10 +176,7 @@ func TestJobValidation(t *testing.T) {
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{
|
||||
{}, //nolint:exhaustruct
|
||||
},
|
||||
Sqlite: []main.JobTaskSqlite{{}},
|
||||
},
|
||||
expectedErr: main.ErrMissingField,
|
||||
},
|
||||
@ -220,53 +207,25 @@ func TestConfigValidation(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Valid job",
|
||||
config: main.Config{
|
||||
DefaultConfig: nil,
|
||||
Jobs: []main.Job{{
|
||||
config: main.Config{Jobs: []main.Job{{
|
||||
Name: "Valid job",
|
||||
Schedule: "@daily",
|
||||
Config: ValidResticConfig(),
|
||||
Tasks: []main.JobTask{},
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
}},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "Valid job with default config",
|
||||
config: main.Config{
|
||||
DefaultConfig: ValidResticConfig(),
|
||||
Jobs: []main.Job{{
|
||||
Name: "Valid job",
|
||||
Schedule: "@daily",
|
||||
Config: nil,
|
||||
Tasks: []main.JobTask{},
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
}},
|
||||
},
|
||||
}}},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "No jobs",
|
||||
config: main.Config{
|
||||
DefaultConfig: nil,
|
||||
Jobs: []main.Job{},
|
||||
},
|
||||
config: main.Config{Jobs: []main.Job{}},
|
||||
expectedErr: main.ErrNoJobsFound,
|
||||
},
|
||||
{
|
||||
name: "Invalid name",
|
||||
config: main.Config{
|
||||
DefaultConfig: nil,
|
||||
Jobs: []main.Job{{
|
||||
config: main.Config{Jobs: []main.Job{{
|
||||
Name: "",
|
||||
Schedule: "@daily",
|
||||
Config: ValidResticConfig(),
|
||||
@ -274,28 +233,8 @@ func TestConfigValidation(t *testing.T) {
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
}},
|
||||
},
|
||||
expectedErr: main.ErrMissingField,
|
||||
},
|
||||
{
|
||||
name: "Missing config",
|
||||
config: main.Config{
|
||||
DefaultConfig: nil,
|
||||
Jobs: []main.Job{{
|
||||
Name: "",
|
||||
Schedule: "@daily",
|
||||
Config: nil,
|
||||
Tasks: []main.JobTask{},
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
}},
|
||||
},
|
||||
}}},
|
||||
expectedErr: main.ErrMissingField,
|
||||
},
|
||||
}
|
||||
|

main.go: 58 lines changed
@ -31,10 +31,6 @@ func ParseConfig(path string) ([]Job, error) {
|
||||
Params: []function.Parameter{{
|
||||
Name: "var",
|
||||
Type: cty.String,
|
||||
AllowNull: false,
|
||||
AllowUnknown: false,
|
||||
AllowDynamicType: false,
|
||||
AllowMarked: false,
|
||||
}},
|
||||
VarParam: nil,
|
||||
Type: function.StaticReturnType(cty.String),
|
||||
@ -46,10 +42,6 @@ func ParseConfig(path string) ([]Job, error) {
|
||||
Params: []function.Parameter{{
|
||||
Name: "path",
|
||||
Type: cty.String,
|
||||
AllowNull: false,
|
||||
AllowUnknown: false,
|
||||
AllowDynamicType: false,
|
||||
AllowMarked: false,
|
||||
}},
|
||||
VarParam: nil,
|
||||
Type: function.StaticReturnType(cty.String),
|
||||
@ -148,10 +140,6 @@ func FilterJobs(jobs []Job, names []string) ([]Job, error) {
|
||||
}
|
||||
|
||||
func runBackupJobs(jobs []Job, names string) error {
|
||||
if names == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
namesSlice := strings.Split(names, ",")
|
||||
|
||||
if len(namesSlice) == 0 {
|
||||
@ -168,11 +156,7 @@ func runBackupJobs(jobs []Job, names string) error {
|
||||
return filterJobErr
|
||||
}
|
||||
|
||||
func runRestoreJobs(jobs []Job, names string, snapshot string) error {
|
||||
if names == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
func runRestoreJobs(jobs []Job, names string) error {
|
||||
namesSlice := strings.Split(names, ",")
|
||||
|
||||
if len(namesSlice) == 0 {
|
||||
@ -181,28 +165,7 @@ func runRestoreJobs(jobs []Job, names string, snapshot string) error {
|
||||
|
||||
jobs, filterJobErr := FilterJobs(jobs, namesSlice)
|
||||
for _, job := range jobs {
|
||||
if err := job.RunRestore(snapshot); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return filterJobErr
|
||||
}
|
||||
|
||||
func runUnlockJobs(jobs []Job, names string) error {
|
||||
if names == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
namesSlice := strings.Split(names, ",")
|
||||
|
||||
if len(namesSlice) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
jobs, filterJobErr := FilterJobs(jobs, namesSlice)
|
||||
for _, job := range jobs {
|
||||
if err := job.NewRestic().Unlock(UnlockOpts{RemoveAll: true}); err != nil {
|
||||
if err := job.RunRestore(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@ -214,8 +177,6 @@ type Flags struct {
|
||||
showVersion bool
|
||||
backup string
|
||||
restore string
|
||||
unlock string
|
||||
restoreSnapshot string
|
||||
once bool
|
||||
healthCheckAddr string
|
||||
metricsPushGateway string
|
||||
@ -226,30 +187,23 @@ func readFlags() Flags {
|
||||
flag.BoolVar(&flags.showVersion, "version", false, "Display the version and exit")
|
||||
flag.StringVar(&flags.backup, "backup", "", "Run backup jobs now. Names are comma separated. `all` will run all.")
|
||||
flag.StringVar(&flags.restore, "restore", "", "Run restore jobs now. Names are comma separated. `all` will run all.")
|
||||
flag.StringVar(&flags.unlock, "unlock", "", "Unlock job repos now. Names are comma separated. `all` will run all.")
|
||||
flag.BoolVar(&flags.once, "once", false, "Run jobs specified using -backup and -restore once and exit")
|
||||
flag.StringVar(&flags.healthCheckAddr, "addr", "0.0.0.0:8080", "address to bind health check API")
|
||||
flag.StringVar(&flags.metricsPushGateway, "push-gateway", "", "url of push gateway service for batch runs (optional)")
|
||||
flag.StringVar(&JobBaseDir, "base-dir", JobBaseDir, "Base dir to create intermediate job files like SQL dumps.")
|
||||
flag.StringVar(&flags.restoreSnapshot, "snapshot", "latest", "the snapshot to restore")
|
||||
flag.Parse()
|
||||
|
||||
return flags
|
||||
}
|
||||
|
||||
func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot string) error {
|
||||
// Run specified job unlocks
|
||||
if err := runUnlockJobs(jobs, unlockJobs); err != nil {
|
||||
return fmt.Errorf("Failed running unlock for jobs: %w", err)
|
||||
}
|
||||
|
||||
func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs string) error {
|
||||
// Run specified backup jobs
|
||||
if err := runBackupJobs(jobs, backupJobs); err != nil {
|
||||
return fmt.Errorf("Failed running backup jobs: %w", err)
|
||||
}
|
||||
|
||||
// Run specified restore jobs
|
||||
if err := runRestoreJobs(jobs, restoreJobs, snapshot); err != nil {
|
||||
if err := runRestoreJobs(jobs, restoreJobs); err != nil {
|
||||
return fmt.Errorf("Failed running restore jobs: %w", err)
|
||||
}
|
||||
|
||||
@ -258,8 +212,6 @@ func runSpecifiedJobs(jobs []Job, backupJobs, restoreJobs, unlockJobs, snapshot
|
||||
|
||||
func maybePushMetrics(metricsPushGateway string) error {
|
||||
if metricsPushGateway != "" {
|
||||
fmt.Println("Pushing metrics to push gateway")
|
||||
|
||||
if err := Metrics.PushToGateway(metricsPushGateway); err != nil {
|
||||
return fmt.Errorf("Failed pushing metrics after jobs run: %w", err)
|
||||
}
|
||||
@ -291,7 +243,7 @@ func main() {
|
||||
log.Fatalf("Failed to read jobs from files: %v", err)
|
||||
}
|
||||
|
||||
if err := runSpecifiedJobs(jobs, flags.backup, flags.restore, flags.unlock, flags.restoreSnapshot); err != nil {
|
||||
if err := runSpecifiedJobs(jobs, flags.backup, flags.restore); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
|
@ -18,9 +18,9 @@ func TestMain(m *testing.M) {
|
||||
if testResult == 0 && testing.CoverMode() != "" {
|
||||
c := testing.Coverage()
|
||||
if c < MinCoverage {
|
||||
fmt.Printf("WARNING: Tests passed but coverage failed at %0.2f and minimum to pass is %0.2f\n", c, MinCoverage)
|
||||
fmt.Printf("Tests passed but coverage failed at %0.2f and minimum to pass is %0.2f\n", c, MinCoverage)
|
||||
|
||||
testResult = 0
|
||||
testResult = -1
|
||||
}
|
||||
}
|
||||
|
||||
@ -31,6 +31,7 @@ func TestReadJobs(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
jobs, err := main.ReadJobs([]string{"./test/sample.hcl"})
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error reading jobs: %v", err)
|
||||
}
|
||||
@ -51,7 +52,6 @@ func TestRunJobs(t *testing.T) {
|
||||
Backup: main.BackupFilesTask{Paths: []string{"/test"}}, //nolint:exhaustruct
|
||||
Forget: nil,
|
||||
MySQL: []main.JobTaskMySQL{},
|
||||
Postgres: []main.JobTaskPostgres{},
|
||||
Sqlite: []main.JobTaskSqlite{},
|
||||
}
|
||||
|
||||
|
@ -19,6 +19,7 @@ func (m ResticMetrics) PushToGateway(url string) error {
|
||||
err := push.New(url, "batch").
|
||||
Gatherer(m.Registry).
|
||||
Add()
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("error pushing to registry %s: %w", url, err)
|
||||
}
|
||||
|

restic.go: 80 lines changed
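The restic.go hunks below revolve around a small CommandOptions interface (a single ToArgs() []string method), option structs such as UnlockOpts, and helpers like maybeAddArgBool that append flags conditionally. A minimal sketch of that pattern, reusing names that appear in the diff and simplified stand-in logic everywhere else:

```go
package main

import "fmt"

// CommandOptions mirrors the interface shown in the hunks below: anything
// that can render itself as restic command-line arguments.
type CommandOptions interface {
	ToArgs() []string
}

// UnlockOpts matches the shape in the diff; the conditional append here is a
// simplified stand-in for the maybeAddArgBool helper.
type UnlockOpts struct {
	RemoveAll bool
}

func (uo UnlockOpts) ToArgs() []string {
	args := []string{}
	if uo.RemoveAll {
		args = append(args, "--remove-all")
	}

	return args
}

func main() {
	// Options compose into a final argv: restic <command> <command opts>.
	var opts CommandOptions = UnlockOpts{RemoveAll: true}
	argv := append([]string{"restic", "unlock"}, opts.ToArgs()...)
	fmt.Println(argv) // [restic unlock --remove-all]
}
```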
@ -11,10 +11,18 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrRestic = errors.New("restic error")
|
||||
ErrRepoNotFound = errors.Join(errors.New("repository not found or uninitialized"), ErrRestic)
|
||||
)
|
||||
var ErrRestic = errors.New("restic error")
|
||||
var ErrRepoNotFound = errors.New("repository not found or uninitialized")
|
||||
|
||||
func lineIn(needle string, haystack []string) bool {
|
||||
for _, line := range haystack {
|
||||
if line == needle {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func maybeAddArgString(args []string, name, value string) []string {
|
||||
if value != "" {
|
||||
@ -48,41 +56,22 @@ func maybeAddArgsList(args []string, name string, value []string) []string {
|
||||
return args
|
||||
}
|
||||
|
||||
// CommandOptions interface dictates a ToArgs() method should return each commandline arg as a string slice.
|
||||
type CommandOptions interface {
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
ToArgs() []string
|
||||
}
|
||||
|
||||
// GenericOpts allows passing an arbitrary string slice as a set of command line options compatible with CommandOptions.
|
||||
type GenericOpts []string
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (o GenericOpts) ToArgs() []string {
|
||||
return o
|
||||
}
|
||||
|
||||
// NoOpts is a struct that fulfils the CommandOptions interface but provides no arguments.
|
||||
type NoOpts struct{}
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (NoOpts) ToArgs() []string {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
// UnlockOpts holds optional arguments for unlock command.
|
||||
type UnlockOpts struct {
|
||||
RemoveAll bool `hcl:"RemoveAll,optional"`
|
||||
}
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (uo UnlockOpts) ToArgs() (args []string) {
|
||||
args = maybeAddArgBool(args, "--remove-all", uo.RemoveAll)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// BackupOpts holds optional arguments for the Restic backup command.
|
||||
type BackupOpts struct {
|
||||
Exclude []string `hcl:"Exclude,optional"`
|
||||
Include []string `hcl:"Include,optional"`
|
||||
@ -90,7 +79,6 @@ type BackupOpts struct {
|
||||
Host string `hcl:"Host,optional"`
|
||||
}
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (bo BackupOpts) ToArgs() (args []string) {
|
||||
args = maybeAddArgsList(args, "--exclude", bo.Exclude)
|
||||
args = maybeAddArgsList(args, "--include", bo.Include)
|
||||
@ -110,7 +98,6 @@ type RestoreOpts struct {
|
||||
Verify bool `hcl:"Verify,optional"`
|
||||
}
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (ro RestoreOpts) ToArgs() (args []string) {
|
||||
args = maybeAddArgsList(args, "--exclude", ro.Exclude)
|
||||
args = maybeAddArgsList(args, "--include", ro.Include)
|
||||
@ -150,7 +137,6 @@ type ForgetOpts struct {
|
||||
Prune bool `hcl:"Prune,optional"`
|
||||
}
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (fo ForgetOpts) ToArgs() (args []string) {
|
||||
args = maybeAddArgInt(args, "--keep-last", fo.KeepLast)
|
||||
args = maybeAddArgInt(args, "--keep-hourly", fo.KeepHourly)
|
||||
@ -207,15 +193,13 @@ type ResticGlobalOpts struct {
|
||||
TLSClientCertFile string `hcl:"TlsClientCertFile,optional"`
|
||||
LimitDownload int `hcl:"LimitDownload,optional"`
|
||||
LimitUpload int `hcl:"LimitUpload,optional"`
|
||||
VerboseLevel int `hcl:"VerboseLevel,optional"`
|
||||
Options map[string]string `hcl:"Options,optional"`
|
||||
VerboseLevel int `hcl:"VerboseLevel,optional"`
|
||||
CleanupCache bool `hcl:"CleanupCache,optional"`
|
||||
InsecureTLS bool `hcl:"InsecureTls,optional"`
|
||||
NoCache bool `hcl:"NoCache,optional"`
|
||||
NoLock bool `hcl:"NoLock,optional"`
|
||||
}
|
||||
|
||||
// ToArgs returns the structs arguments as a slice of strings.
|
||||
func (glo ResticGlobalOpts) ToArgs() (args []string) {
|
||||
args = maybeAddArgString(args, "--cacert", glo.CaCertFile)
|
||||
args = maybeAddArgString(args, "--cache-dir", glo.CacheDir)
|
||||
@ -225,7 +209,6 @@ func (glo ResticGlobalOpts) ToArgs() (args []string) {
|
||||
args = maybeAddArgInt(args, "--limit-upload", glo.LimitUpload)
|
||||
args = maybeAddArgInt(args, "--verbose", glo.VerboseLevel)
|
||||
args = maybeAddArgBool(args, "--cleanup-cache", glo.CleanupCache)
|
||||
args = maybeAddArgBool(args, "--insecure-tls", glo.InsecureTLS)
|
||||
args = maybeAddArgBool(args, "--no-cache", glo.NoCache)
|
||||
args = maybeAddArgBool(args, "--no-lock", glo.NoLock)
|
||||
|
||||
@ -290,11 +273,7 @@ func (e *ResticError) Unwrap() error {
|
||||
return e.OriginalError
|
||||
}
|
||||
|
||||
func (rcmd Restic) RunRestic(
|
||||
command string,
|
||||
options CommandOptions,
|
||||
commandArgs ...string,
|
||||
) (*CapturedCommandLogWriter, error) {
|
||||
func (rcmd Restic) RunRestic(command string, options CommandOptions, commandArgs ...string) ([]string, error) {
|
||||
args := []string{}
|
||||
if rcmd.GlobalOpts != nil {
|
||||
args = rcmd.GlobalOpts.ToArgs()
|
||||
@ -306,22 +285,22 @@ func (rcmd Restic) RunRestic(
|
||||
|
||||
cmd := exec.Command("restic", args...)
|
||||
|
||||
output := NewCapturedCommandLogWriter(rcmd.Logger)
|
||||
cmd.Stdout = output.Stdout
|
||||
cmd.Stderr = output.Stderr
|
||||
output := NewCapturedLogWriter(rcmd.Logger)
|
||||
cmd.Stdout = output
|
||||
cmd.Stderr = output
|
||||
cmd.Env = rcmd.BuildEnv()
|
||||
cmd.Dir = rcmd.Cwd
|
||||
|
||||
if err := cmd.Run(); err != nil {
|
||||
responseErr := ErrRestic
|
||||
if lineIn("Is there a repository at the following location?", output.Stderr.Lines) {
|
||||
if lineIn("Is there a repository at the following location?", output.Lines) {
|
||||
responseErr = ErrRepoNotFound
|
||||
}
|
||||
|
||||
return output, NewResticError(command, output.AllLines(), errors.Join(err, responseErr))
|
||||
return output.Lines, NewResticError(command, output.Lines, responseErr)
|
||||
}
|
||||
|
||||
return output, nil
|
||||
return output.Lines, nil
|
||||
}
|
||||
|
||||
func (rcmd Restic) Backup(files []string, opts BackupOpts) error {
|
||||
@ -348,12 +327,6 @@ func (rcmd Restic) Check() error {
|
||||
return err
|
||||
}
|
||||
|
||||
func (rcmd Restic) Unlock(unlockOpts UnlockOpts) error {
|
||||
_, err := rcmd.RunRestic("unlock", unlockOpts)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
type Snapshot struct {
|
||||
UID int `json:"uid"`
|
||||
GID int `json:"gid"`
|
||||
@ -368,20 +341,15 @@ type Snapshot struct {
|
||||
}
|
||||
|
||||
func (rcmd Restic) ReadSnapshots() ([]Snapshot, error) {
|
||||
output, err := rcmd.RunRestic("snapshots", GenericOpts{"--json"})
|
||||
lines, err := rcmd.RunRestic("snapshots", GenericOpts{"--json"})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(output.Stdout.Lines) == 0 {
|
||||
return nil, fmt.Errorf("no snapshot output to parse: %w", ErrRestic)
|
||||
}
|
||||
|
||||
singleLineOutput := strings.Join(output.Stdout.Lines, "")
|
||||
|
||||
snapshots := new([]Snapshot)
|
||||
if err = json.Unmarshal([]byte(singleLineOutput), snapshots); err != nil {
|
||||
return nil, fmt.Errorf("failed parsing snapshot results from %s: %w", singleLineOutput, err)
|
||||
|
||||
if err = json.Unmarshal([]byte(lines[0]), snapshots); err != nil {
|
||||
return nil, fmt.Errorf("failed parsing snapshot results from %s: %w", lines[0], err)
|
||||
}
|
||||
|
||||
return *snapshots, nil
|
||||
|
@ -32,7 +32,6 @@ func TestGlobalOptions(t *testing.T) {
|
||||
LimitUpload: 1,
|
||||
VerboseLevel: 1,
|
||||
CleanupCache: true,
|
||||
InsecureTLS: true,
|
||||
NoCache: true,
|
||||
NoLock: true,
|
||||
Options: map[string]string{
|
||||
@ -49,7 +48,6 @@ func TestGlobalOptions(t *testing.T) {
|
||||
"--limit-upload", "1",
|
||||
"--verbose", "1",
|
||||
"--cleanup-cache",
|
||||
"--insecure-tls",
|
||||
"--no-cache",
|
||||
"--no-lock",
|
||||
"--option", "key='a long value'",
|
||||
@ -152,20 +150,6 @@ func TestForgetOpts(t *testing.T) {
|
||||
AssertEqual(t, "args didn't match", expected, args)
|
||||
}
|
||||
|
||||
func TestUnlockOpts(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
args := main.UnlockOpts{
|
||||
RemoveAll: true,
|
||||
}.ToArgs()
|
||||
|
||||
expected := []string{
|
||||
"--remove-all",
|
||||
}
|
||||
|
||||
AssertEqual(t, "args didn't match", expected, args)
|
||||
}
|
||||
|
||||
func TestBuildEnv(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@ -237,7 +221,7 @@ func TestResticInterface(t *testing.T) {
|
||||
}
|
||||
|
||||
// Write test file to the data dir
|
||||
err := os.WriteFile(dataFile, []byte("testing"), 0o644)
|
||||
err := os.WriteFile(dataFile, []byte("testing"), 0644)
|
||||
AssertEqualFail(t, "unexpected error writing to test file", nil, err)
|
||||
|
||||
// Make sure no existing repo is found
|
||||
@ -297,7 +281,7 @@ func TestResticInterface(t *testing.T) {
|
||||
AssertEqualFail(t, "unexpected error checking repo", nil, err)
|
||||
|
||||
// Change the data file
|
||||
err = os.WriteFile(dataFile, []byte("unexpected"), 0o644)
|
||||
err = os.WriteFile(dataFile, []byte("unexpected"), 0644)
|
||||
AssertEqualFail(t, "unexpected error writing to test file", nil, err)
|
||||
|
||||
// Check that data wrote
|
||||
@ -313,8 +297,4 @@ func TestResticInterface(t *testing.T) {
|
||||
value, err = os.ReadFile(restoredDataFile)
|
||||
AssertEqualFail(t, "unexpected error reading from test file", nil, err)
|
||||
AssertEqualFail(t, "incorrect value in test file", "testing", string(value))
|
||||
|
||||
// Try to unlock the repo (repo shouldn't really be locked, but this should still run without error
|
||||
err = restic.Unlock(main.UnlockOpts{}) //nolint:exhaustruct
|
||||
AssertEqualFail(t, "unexpected error unlocking repo", nil, err)
|
||||
}
|
||||
|

scheduler.go: 15 lines changed
@@ -13,10 +13,8 @@ import (
    "github.com/robfig/cron/v3"
)

var (
    jobResultsLock = sync.Mutex{}
    jobResults = map[string]JobResult{}
)
var jobResultsLock = sync.Mutex{}
var jobResults = map[string]JobResult{}

type JobResult struct {
    JobName string
@@ -69,12 +67,9 @@ func healthHandleFunc(writer http.ResponseWriter, request *http.Request) {

func RunHTTPHandlers(addr string) error {
    http.HandleFunc("/health", healthHandleFunc)
    http.Handle("/metrics", promhttp.HandlerFor(
        Metrics.Registry,
        promhttp.HandlerOpts{Registry: Metrics.Registry}, //nolint:exhaustruct
    ))
    http.Handle("/metrics", promhttp.Handler())

    return fmt.Errorf("error on http server: %w", http.ListenAndServe(addr, nil)) //#nosec: g114
    return fmt.Errorf("error on healthcheck: %w", http.ListenAndServe(addr, nil)) //#nosec: g114
}

func ScheduleAndRunJobs(jobs []Job) error {
@@ -115,8 +110,6 @@ func ScheduleAndRunJobs(jobs []Job) error {
    defer func() {
        ctx := scheduler.Stop()
        <-ctx.Done()

        fmt.Println("All jobs successfully stopped")
    }()

    return nil
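In the scheduler.go hunk above, the /metrics route moves between promhttp.HandlerFor(Metrics.Registry, ...) and the default promhttp.Handler(). A short sketch of the registry-scoped variant using the upstream prometheus client API; the registry, gauge name, and address here are illustrative, not taken from the project:

```go
package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// A dedicated registry limits the scrape output to metrics this process
	// registers explicitly, instead of the global default set.
	reg := prometheus.NewRegistry()
	jobFailures := prometheus.NewGaugeVec(
		prometheus.GaugeOpts{Name: "restic_job_failure_count", Help: "Failures per job"},
		[]string{"job"},
	)
	reg.MustRegister(jobFailures)

	// HandlerFor serves only the given registry; setting Registry in
	// HandlerOpts also lets promhttp record its own handler errors there.
	http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{Registry: reg}))
	_ = http.ListenAndServe("0.0.0.0:8080", nil)
}
```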

shell.go: 29 lines changed
@ -5,7 +5,6 @@ import (
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
@ -40,7 +39,6 @@ func NewCapturedLogWriter(logger *log.Logger) *CapturedLogWriter {
|
||||
return &CapturedLogWriter{Lines: []string{}, logger: logger}
|
||||
}
|
||||
|
||||
// Write writes the provided byte slice to the logger and stores each captured line.
|
||||
func (w *CapturedLogWriter) Write(content []byte) (n int, err error) {
|
||||
message := string(content)
|
||||
for _, line := range strings.Split(message, "\n") {
|
||||
@ -51,33 +49,6 @@ func (w *CapturedLogWriter) Write(content []byte) (n int, err error) {
|
||||
return len(content), nil
|
||||
}
|
||||
|
||||
// LinesMergedWith returns a slice of lines from this logger merged with another.
|
||||
func (w CapturedLogWriter) LinesMergedWith(other CapturedLogWriter) []string {
|
||||
allLines := []string{}
|
||||
allLines = append(allLines, w.Lines...)
|
||||
allLines = append(allLines, other.Lines...)
|
||||
|
||||
sort.Strings(allLines)
|
||||
|
||||
return allLines
|
||||
}
|
||||
|
||||
type CapturedCommandLogWriter struct {
|
||||
Stdout *CapturedLogWriter
|
||||
Stderr *CapturedLogWriter
|
||||
}
|
||||
|
||||
func NewCapturedCommandLogWriter(logger *log.Logger) *CapturedCommandLogWriter {
|
||||
return &CapturedCommandLogWriter{
|
||||
Stdout: NewCapturedLogWriter(logger),
|
||||
Stderr: NewCapturedLogWriter(logger),
|
||||
}
|
||||
}
|
||||
|
||||
func (cclw CapturedCommandLogWriter) AllLines() []string {
|
||||
return cclw.Stdout.LinesMergedWith(*cclw.Stderr)
|
||||
}
|
||||
|
||||
func RunShell(script string, cwd string, env map[string]string, logger *log.Logger) error {
|
||||
cmd := exec.Command("sh", "-c", strings.TrimSpace(script)) //nolint:gosec
|
||||
|
||||
|

tasks.go: 182 lines changed
@ -67,7 +67,7 @@ func (t *JobTaskScript) SetName(name string) {
|
||||
t.name = name
|
||||
}
|
||||
|
||||
// JobTaskMySQL is a MySQL backup task that performs required pre and post tasks.
|
||||
// JobTaskMySQL is a sqlite backup task that performs required pre and post tasks.
|
||||
type JobTaskMySQL struct {
|
||||
Port int `hcl:"port,optional"`
|
||||
Name string `hcl:"name,label"`
|
||||
@ -89,16 +89,11 @@ func (t JobTaskMySQL) Validate() error {
|
||||
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
|
||||
}
|
||||
|
||||
if stat, err := os.Stat(t.DumpToPath); err != nil {
|
||||
if s, err := os.Stat(t.DumpToPath); err != nil {
|
||||
if !errors.Is(err, fs.ErrNotExist) {
|
||||
return fmt.Errorf(
|
||||
"task %s: invalid dump_to: could not stat path: %s: %w",
|
||||
t.Name,
|
||||
t.DumpToPath,
|
||||
ErrInvalidConfigValue,
|
||||
)
|
||||
return fmt.Errorf("task %s: invalid dump_to: could not stat path: %v: %w", t.Name, err, ErrInvalidConfigValue)
|
||||
}
|
||||
} else if stat.Mode().IsDir() {
|
||||
} else if s.Mode().IsDir() {
|
||||
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
|
||||
}
|
||||
|
||||
@ -160,20 +155,12 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
|
||||
command = append(command, "--host", t.Hostname)
|
||||
}
|
||||
|
||||
if t.Port != 0 {
|
||||
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
|
||||
}
|
||||
|
||||
if t.Username != "" {
|
||||
command = append(command, "--user", t.Username)
|
||||
}
|
||||
|
||||
if t.Password != "" {
|
||||
command = append(command, fmt.Sprintf("--password=%s", t.Password))
|
||||
}
|
||||
|
||||
if t.Database != "" {
|
||||
command = append(command, t.Database)
|
||||
command = append(command, "--password", t.Password)
|
||||
}
|
||||
|
||||
command = append(command, "<", t.DumpToPath)
|
||||
@ -187,144 +174,6 @@ func (t JobTaskMySQL) GetPostTask() ExecutableTask {
|
||||
}
|
||||
}
|
||||
|
||||
// JobTaskPostgres is a postgres backup task that performs required pre and post tasks.
|
||||
type JobTaskPostgres struct {
|
||||
Port int `hcl:"port,optional"`
|
||||
Name string `hcl:"name,label"`
|
||||
Hostname string `hcl:"hostname,optional"`
|
||||
Database string `hcl:"database,optional"`
|
||||
Username string `hcl:"username,optional"`
|
||||
Password string `hcl:"password,optional"`
|
||||
Tables []string `hcl:"tables,optional"`
|
||||
DumpToPath string `hcl:"dump_to"`
|
||||
NoTablespaces bool `hcl:"no_tablespaces,optional"`
|
||||
Clean bool `hcl:"clean,optional"`
|
||||
Create bool `hcl:"create,optional"`
|
||||
}
|
||||
|
||||
func (t JobTaskPostgres) Paths() []string {
|
||||
return []string{t.DumpToPath}
|
||||
}
|
||||
|
||||
func (t JobTaskPostgres) Validate() error {
|
||||
if t.DumpToPath == "" {
|
||||
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
|
||||
}
|
||||
|
||||
if stat, err := os.Stat(t.DumpToPath); err != nil {
|
||||
if !errors.Is(err, fs.ErrNotExist) {
|
||||
return fmt.Errorf(
|
||||
"task %s: invalid dump_to: could not stat path: %s: %w",
|
||||
t.Name,
|
||||
t.DumpToPath,
|
||||
ErrInvalidConfigValue,
|
||||
)
|
||||
}
|
||||
} else if stat.Mode().IsDir() {
|
||||
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
|
||||
}
|
||||
|
||||
if len(t.Tables) > 0 && t.Database == "" {
|
||||
return fmt.Errorf(
|
||||
"task %s is invalid. Must specify a database to use tables: %w",
|
||||
t.Name,
|
||||
ErrMissingField,
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
//nolint:cyclop
|
||||
func (t JobTaskPostgres) GetPreTask() ExecutableTask {
|
||||
command := []string{"pg_dump"}
|
||||
if t.Database == "" {
|
||||
command = []string{"pg_dumpall"}
|
||||
}
|
||||
|
||||
command = append(command, "--file", t.DumpToPath)
|
||||
|
||||
if t.Hostname != "" {
|
||||
command = append(command, "--host", t.Hostname)
|
||||
}
|
||||
|
||||
if t.Port != 0 {
|
||||
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
|
||||
}
|
||||
|
||||
if t.Username != "" {
|
||||
command = append(command, "--username", t.Username)
|
||||
}
|
||||
|
||||
if t.NoTablespaces {
|
||||
command = append(command, "--no-tablespaces")
|
||||
}
|
||||
|
||||
if t.Clean {
|
||||
command = append(command, "--clean")
|
||||
}
|
||||
|
||||
if t.Create {
|
||||
command = append(command, "--create")
|
||||
}
|
||||
|
||||
for _, table := range t.Tables {
|
||||
command = append(command, "--table", table)
|
||||
}
|
||||
|
||||
if t.Database != "" {
|
||||
command = append(command, t.Database)
|
||||
}
|
||||
|
||||
env := map[string]string{}
|
||||
if t.Password != "" {
|
||||
env["PGPASSWORD"] = t.Password
|
||||
}
|
||||
|
||||
return JobTaskScript{
|
||||
name: t.Name,
|
||||
env: env,
|
||||
Cwd: ".",
|
||||
OnBackup: strings.Join(command, " "),
|
||||
OnRestore: "",
|
||||
}
|
||||
}
|
||||
|
||||
func (t JobTaskPostgres) GetPostTask() ExecutableTask {
|
||||
command := []string{"psql"}
|
||||
|
||||
if t.Hostname != "" {
|
||||
command = append(command, "--host", t.Hostname)
|
||||
}
|
||||
|
||||
if t.Port != 0 {
|
||||
command = append(command, "--port", fmt.Sprintf("%d", t.Port))
|
||||
}
|
||||
|
||||
if t.Username != "" {
|
||||
command = append(command, "--username", t.Username)
|
||||
}
|
||||
|
||||
if t.Database != "" {
|
||||
command = append(command, t.Database)
|
||||
}
|
||||
|
||||
command = append(command, "<", t.DumpToPath)
|
||||
|
||||
env := map[string]string{}
|
||||
if t.Password != "" {
|
||||
env["PGPASSWORD"] = t.Password
|
||||
}
|
||||
|
||||
return JobTaskScript{
|
||||
name: t.Name,
|
||||
env: env,
|
||||
Cwd: ".",
|
||||
OnBackup: "",
|
||||
OnRestore: strings.Join(command, " "),
|
||||
}
|
||||
}
|
||||
|
||||
// JobTaskSqlite is a sqlite backup task that performs required pre and post tasks.
|
||||
type JobTaskSqlite struct {
|
||||
Name string `hcl:"name,label"`
|
||||
@ -341,16 +190,11 @@ func (t JobTaskSqlite) Validate() error {
|
||||
return fmt.Errorf("task %s is missing dump_to path: %w", t.Name, ErrMissingField)
|
||||
}
|
||||
|
||||
if stat, err := os.Stat(t.DumpToPath); err != nil {
|
||||
if s, err := os.Stat(t.DumpToPath); err != nil {
|
||||
if !errors.Is(err, fs.ErrNotExist) {
|
||||
return fmt.Errorf(
|
||||
"task %s: invalid dump_to: could not stat path: %s: %w",
|
||||
t.Name,
|
||||
t.DumpToPath,
|
||||
ErrInvalidConfigValue,
|
||||
)
|
||||
return fmt.Errorf("task %s: invalid dump_to: could not stat path: %v: %w", t.Name, err, ErrInvalidConfigValue)
|
||||
}
|
||||
} else if stat.Mode().IsDir() {
|
||||
} else if s.Mode().IsDir() {
|
||||
return fmt.Errorf("task %s: dump_to cannot be a directory: %w", t.Name, ErrInvalidConfigValue)
|
||||
}
|
||||
|
||||
@ -382,7 +226,6 @@ type BackupFilesTask struct {
|
||||
BackupOpts *BackupOpts `hcl:"backup_opts,block"`
|
||||
RestoreOpts *RestoreOpts `hcl:"restore_opts,block"`
|
||||
name string
|
||||
snapshot string
|
||||
}
|
||||
|
||||
func (t BackupFilesTask) RunBackup(cfg TaskConfig) error {
|
||||
@ -405,11 +248,8 @@ func (t BackupFilesTask) RunRestore(cfg TaskConfig) error {
|
||||
t.RestoreOpts = &RestoreOpts{} //nolint:exhaustruct
|
||||
}
|
||||
|
||||
if t.snapshot == "" {
|
||||
t.snapshot = "latest"
|
||||
}
|
||||
|
||||
if err := cfg.Restic.Restore(t.snapshot, *t.RestoreOpts); err != nil {
|
||||
// TODO: Make the snapshot configurable
|
||||
if err := cfg.Restic.Restore("latest", *t.RestoreOpts); err != nil {
|
||||
err = fmt.Errorf("failed restoring paths: %w", err)
|
||||
cfg.Logger.Print(err)
|
||||
|
||||
@ -441,12 +281,10 @@ type JobTask struct {
|
||||
PreScripts []JobTaskScript `hcl:"pre_script,block"`
|
||||
PostScripts []JobTaskScript `hcl:"post_script,block"`
|
||||
MySQL []JobTaskMySQL `hcl:"mysql,block"`
|
||||
Postgres []JobTaskPostgres `hcl:"postgres,block"`
|
||||
Sqlite []JobTaskSqlite `hcl:"sqlite,block"`
|
||||
}
|
||||
|
||||
func (t JobTask) Validate() error {
|
||||
// NOTE: Might make task types mutually exclusive because order is confusing even if deterministic
|
||||
if t.Name == "" {
|
||||
return fmt.Errorf("task is missing a name: %w", ErrMissingField)
|
||||
}
|
||||
|
@ -163,29 +163,7 @@ func TestJobTaskSql(t *testing.T) {
|
||||
" --user user --password=pass --no-tablespaces db table1 table2",
|
||||
postBackup: "",
|
||||
preRestore: "",
|
||||
postRestore: "mysql --host host --port 3306 --user user --password=pass db < ./simple.sql",
|
||||
},
|
||||
{
|
||||
name: "psql all",
|
||||
task: main.JobTaskPostgres{
|
||||
Name: "simple",
|
||||
Hostname: "host",
|
||||
Port: 6543,
|
||||
Username: "user",
|
||||
Password: "pass",
|
||||
Database: "db",
|
||||
NoTablespaces: true,
|
||||
Create: true,
|
||||
Clean: true,
|
||||
Tables: []string{"table1", "table2"},
|
||||
DumpToPath: "./simple.sql",
|
||||
},
|
||||
validationErr: nil,
|
||||
preBackup: "pg_dump --file ./simple.sql --host host --port 6543 --username user --no-tablespaces" +
|
||||
" --clean --create --table table1 --table table2 db",
|
||||
postBackup: "",
|
||||
preRestore: "",
|
||||
postRestore: "psql --host host --port 6543 --username user db < ./simple.sql",
|
||||
postRestore: "mysql --host host --user user --password pass < ./simple.sql",
|
||||
},
|
||||
// Sqlite
|
||||
{
|
||||
|