Add env and readfile functions and a more extensive sample

IamTheFij 2022-03-28 22:53:49 -07:00
parent d65603ba4b
commit f064b2de12
7 changed files with 164 additions and 13 deletions

go.mod

@@ -6,6 +6,7 @@ require (
 	github.com/go-test/deep v1.0.8
 	github.com/hashicorp/hcl/v2 v2.11.1
 	github.com/robfig/cron/v3 v3.0.1
+	github.com/zclconf/go-cty v1.8.0
 )
 
 require (
@@ -13,6 +14,5 @@ require (
 	github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
 	github.com/google/go-cmp v0.3.1 // indirect
 	github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect
-	github.com/zclconf/go-cty v1.8.0 // indirect
 	golang.org/x/text v0.3.5 // indirect
 )

job.go

@@ -147,17 +147,17 @@ func (j Job) AllTasks() []ExecutableTask {
 }
 
 func (j Job) BackupPaths() []string {
-	files := j.Backup.Paths
+	paths := j.Backup.Paths
 
 	for _, t := range j.MySQL {
-		files = append(files, t.DumpToPath)
+		paths = append(paths, t.DumpToPath)
 	}
 
 	for _, t := range j.Sqlite {
-		files = append(files, t.DumpToPath)
+		paths = append(paths, t.DumpToPath)
 	}
 
-	return files
+	return paths
 }
 
 func (j Job) RunBackup() error {

main.go

@@ -4,9 +4,13 @@ import (
 	"flag"
 	"fmt"
 	"log"
+	"os"
 	"strings"
 
+	"github.com/hashicorp/hcl/v2"
 	"github.com/hashicorp/hcl/v2/hclsimple"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
 )
 
 var (
@@ -14,10 +18,43 @@ var (
 	version = "dev"
 )
 
-func parseConfig(path string) ([]Job, error) {
+func ParseConfig(path string) ([]Job, error) {
 	var config Config
 
-	if err := hclsimple.DecodeFile(path, nil, &config); err != nil {
+	ctx := hcl.EvalContext{
+		Variables: nil,
+		Functions: map[string]function.Function{
+			"env": function.New(&function.Spec{
+				Params: []function.Parameter{{
+					Name: "var",
+					Type: cty.String,
+				}},
+				VarParam: nil,
+				Type: function.StaticReturnType(cty.String),
+				Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+					return cty.StringVal(os.Getenv(args[0].AsString())), nil
+				},
+			}),
+			"readfile": function.New(&function.Spec{
+				Params: []function.Parameter{{
+					Name: "path",
+					Type: cty.String,
+				}},
+				VarParam: nil,
+				Type: function.StaticReturnType(cty.String),
+				Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+					content, err := os.ReadFile(args[0].AsString())
+					if err != nil {
+						return cty.StringVal(""), err
+					}
+
+					return cty.StringVal(string(content)), nil
+				},
+			}),
+		},
+	}
+
+	if err := hclsimple.DecodeFile(path, &ctx, &config); err != nil {
 		return nil, fmt.Errorf("%s: Failed to decode file: %w", path, err)
 	}
@@ -36,11 +73,11 @@ func parseConfig(path string) ([]Job, error) {
 	return config.Jobs, nil
 }
 
-func readJobs(paths []string) ([]Job, error) {
+func ReadJobs(paths []string) ([]Job, error) {
 	allJobs := []Job{}
 
 	for _, path := range paths {
-		jobs, err := parseConfig(path)
+		jobs, err := ParseConfig(path)
 		if err != nil {
 			return nil, err
 		}
@@ -113,7 +150,7 @@ func main() {
 		log.Fatalf("Requires a path to a job file, but found none")
 	}
 
-	jobs, err := readJobs(flag.Args())
+	jobs, err := ReadJobs(flag.Args())
 	if err != nil {
 		log.Fatalf("Failed to read jobs from files: %v", err)
 	}
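
A note on how the new EvalContext behaves: every attribute expression in a decoded job file can now call env() or readfile(). The standalone sketch below is not part of this commit; the exampleConfig struct, the inline snippet, and the RESTIC_PASSPHRASE variable name are invented for illustration. It wires the same env function through hclsimple.Decode:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsimple"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

// exampleConfig is a throwaway target struct, not the scheduler's Config type.
type exampleConfig struct {
	Passphrase string `hcl:"passphrase"`
}

func main() {
	// Register an "env" function the same way ParseConfig does above.
	ctx := &hcl.EvalContext{
		Functions: map[string]function.Function{
			"env": function.New(&function.Spec{
				Params: []function.Parameter{{Name: "var", Type: cty.String}},
				Type:   function.StaticReturnType(cty.String),
				Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
					return cty.StringVal(os.Getenv(args[0].AsString())), nil
				},
			}),
		},
	}

	// The function is evaluated while the attribute expression is decoded.
	_ = os.Setenv("RESTIC_PASSPHRASE", "secret phrase")

	src := []byte(`passphrase = env("RESTIC_PASSPHRASE")`)

	var cfg exampleConfig
	if err := hclsimple.Decode("inline.hcl", src, ctx, &cfg); err != nil {
		log.Fatal(err)
	}

	fmt.Println(cfg.Passphrase) // prints "secret phrase"
}

Because env() is backed by os.Getenv, an unset variable decodes to an empty string rather than producing an error.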

View File

@@ -4,6 +4,8 @@ import (
 	"fmt"
 	"os"
 	"testing"
+
+	main "git.iamthefij.com/iamthefij/restic-scheduler"
 )
 
 const MinCoverage = 0.5
@@ -22,3 +24,17 @@ func TestMain(m *testing.M) {
 	os.Exit(testResult)
 }
+
+func TestReadJobs(t *testing.T) {
+	t.Parallel()
+
+	jobs, err := main.ReadJobs([]string{"./test/sample.hcl"})
+	if err != nil {
+		t.Errorf("Unexpected error reading jobs: %v", err)
+	}
+
+	if len(jobs) == 0 {
+		t.Error("Expected read jobs but found none")
+	}
+}
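
The sample file exercised by TestReadJobs calls env("TEST_PASSWORD"), which resolves to an empty string when the variable is unset, so the test passes without any environment setup. A possible companion test, not part of this commit, could supply the variable explicitly; this sketch assumes Go 1.17+ for t.Setenv and would sit in the same test file, reusing its existing imports:

func TestReadJobsWithEnvPassword(t *testing.T) {
	// t.Setenv cannot be combined with t.Parallel, so this test runs serially.
	t.Setenv("TEST_PASSWORD", "unit-test-password")

	jobs, err := main.ReadJobs([]string{"./test/sample.hcl"})
	if err != nil {
		t.Fatalf("Unexpected error reading jobs: %v", err)
	}

	if len(jobs) == 0 {
		t.Fatal("Expected read jobs but found none")
	}
}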

View File

@@ -210,7 +210,7 @@ func (t JobTaskSqlite) GetPostTask() ExecutableTask {
 }
 
 type BackupFilesTask struct {
-	Paths       []string     `hcl:"files"`
+	Paths       []string     `hcl:"paths"`
 	BackupOpts  *BackupOpts  `hcl:"backup_opts,block"`
 	RestoreOpts *RestoreOpts `hcl:"restore_opts,block"`
 	name        string
@@ -222,7 +222,7 @@ func (t BackupFilesTask) RunBackup(cfg TaskConfig) error {
 	}
 
 	if err := cfg.Restic.Backup(cfg.BackupPaths, *t.BackupOpts); err != nil {
-		err = fmt.Errorf("failed backing up files: %w", err)
+		err = fmt.Errorf("failed backing up paths: %w", err)
 		cfg.Logger.Fatal(err)
 
 		return err
@@ -238,7 +238,7 @@ func (t BackupFilesTask) RunRestore(cfg TaskConfig) error {
 	// TODO: Make the snapshot configurable
 	if err := cfg.Restic.Restore("latest", *t.RestoreOpts); err != nil {
-		err = fmt.Errorf("failed restoring files: %w", err)
+		err = fmt.Errorf("failed restoring paths: %w", err)
 		cfg.Logger.Fatal(err)
 
 		return err

test/sample.hcl Normal file

@@ -0,0 +1,97 @@
// A simple backup job
job "BackupDataDir" {
  schedule = "@daily"

  config {
    repo = "./backups"
    passphrase = "secret phrase"
  }

  backup {
    paths = ["./data"]

    restore_opts {
      // Since backup paths are relative to cwd, we're going to restore relative to cwd as well
      Target = "."
    }
  }

  forget {
    KeepLast = 2
    Prune = true
  }
}

job "PassphraseFile" {
  schedule = "@daily"

  config {
    repo = "./backups"

    options {
      // A more secure method of specifying password
      PasswordFile = "./test/samplepassphrase.txt"
    }
  }

  backup {
    paths = ["./data"]

    restore_opts {
      // Since backup paths are relative to cwd, we're going to restore relative to cwd as well
      Target = "."
    }
  }
}

job "BackupDataAndSqlite" {
  schedule = "@daily"

  config {
    repo = "./backups"
    // Another safe way of not inlining the passphrase
    passphrase = readfile("./test/samplepassphrase.txt")
  }

  sqlite "Backup database" {
    path = "./sqlite.db"
    dump_to = "./data/sqlite.db.bak"
  }

  backup {
    paths = ["./data"]

    restore_opts {
      // Since backup paths are relative to cwd, we're going to restore relative to cwd as well
      Target = "."
    }
  }
}

job "BackupMySQLDatabase" {
  schedule = "@daily"

  config {
    repo = "./backups"
    passphrase = "secret phrase"
  }

  mysql "Backup database" {
    hostname = "localhost"
    database = "dbname"
    username = "username"
    // Values can be read from the env to avoid inlining as well
    password = env("TEST_PASSWORD")
    dump_to = "./data/sqlite.db.bak"
  }

  backup {
    paths = ["./data"]

    restore_opts {
      // Since backup paths are relative to cwd, we're going to restore relative to cwd as well
      Target = "."
    }
  }
}

test/samplepassphrase.txt Normal file

@@ -0,0 +1 @@
supersecret