Compare commits

2 Commits: cd74de45cc and be2fd9c327
@ -18,17 +18,9 @@ repos:
      - id: check-added-large-files
      - id: check-merge-conflict
      - id: end-of-file-fixer
        exclude: "^ansible_playbooks/vars/nomad_vars.sample.yml$"
      - id: trailing-whitespace
  - repo: https://github.com/Yelp/detect-secrets
    rev: v1.4.0
    hooks:
      - id: detect-secrets
        args: ['--baseline', '.secrets-baseline']
  - repo: local
    hooks:
      - id: variable-sample
        name: generate variable sample file
        language: system
        entry: bash -c 'venv/bin/python scripts/nomad_vars.py print > ./ansible_playbooks/vars/nomad_vars.sample.yml'
        types: [file]
@ -75,10 +75,6 @@
|
||||
{
|
||||
"path": "detect_secrets.filters.allowlist.is_line_allowlisted"
|
||||
},
|
||||
{
|
||||
"path": "detect_secrets.filters.common.is_baseline_file",
|
||||
"filename": ".secrets-baseline"
|
||||
},
|
||||
{
|
||||
"path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies",
|
||||
"min_level": 2
|
||||
@ -118,74 +114,89 @@
|
||||
}
|
||||
],
|
||||
"results": {
|
||||
"ansible_playbooks/vars/vault_hashi_vault_values.example.yml": [
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "ansible_playbooks/vars/vault_hashi_vault_values.example.yml",
|
||||
"hashed_secret": "f2baa52d02ca888455ce47823f47bf372d5eecb3",
|
||||
"is_verified": false,
|
||||
"line_number": 8
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "ansible_playbooks/vars/vault_hashi_vault_values.example.yml",
|
||||
"hashed_secret": "18960546905b75c869e7de63961dc185f9a0a7c9",
|
||||
"is_verified": false,
|
||||
"line_number": 10
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "ansible_playbooks/vars/vault_hashi_vault_values.example.yml",
|
||||
"hashed_secret": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33",
|
||||
"is_verified": false,
|
||||
"line_number": 22
|
||||
}
|
||||
],
|
||||
"core/authelia.yml": [
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/authelia.yml",
|
||||
"hashed_secret": "7cb6efb98ba5972a9b5090dc2e517fe14d12cb04",
|
||||
"is_verified": false,
|
||||
"line_number": 54,
|
||||
"is_secret": false
|
||||
"line_number": 54
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/authelia.yml",
|
||||
"hashed_secret": "a32b08d97b1615dc27f58b6b17f67624c04e2c4f",
|
||||
"is_verified": false,
|
||||
"line_number": 201,
|
||||
"is_secret": false
|
||||
"line_number": 191
|
||||
}
|
||||
],
|
||||
"core/grafana/grafana.ini": [
|
||||
"core/metrics/grafana/grafana.ini": [
|
||||
{
|
||||
"type": "Basic Auth Credentials",
|
||||
"filename": "core/grafana/grafana.ini",
|
||||
"filename": "core/metrics/grafana/grafana.ini",
|
||||
"hashed_secret": "e5e9fa1ba31ecd1ae84f75caaa474f3a663f05f4",
|
||||
"is_verified": false,
|
||||
"line_number": 78,
|
||||
"is_secret": false
|
||||
"line_number": 78
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/grafana/grafana.ini",
|
||||
"filename": "core/metrics/grafana/grafana.ini",
|
||||
"hashed_secret": "55ebda65c08313526e7ba08ad733e5ebea9900bd",
|
||||
"is_verified": false,
|
||||
"line_number": 109,
|
||||
"is_secret": false
|
||||
"line_number": 109
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/grafana/grafana.ini",
|
||||
"filename": "core/metrics/grafana/grafana.ini",
|
||||
"hashed_secret": "d033e22ae348aeb5660fc2140aec35850c4da997",
|
||||
"is_verified": false,
|
||||
"line_number": 151,
|
||||
"is_secret": false
|
||||
"line_number": 151
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/grafana/grafana.ini",
|
||||
"filename": "core/metrics/grafana/grafana.ini",
|
||||
"hashed_secret": "10bea62ff1e1a7540dc7a6bc10f5fa992349023f",
|
||||
"is_verified": false,
|
||||
"line_number": 154,
|
||||
"is_secret": false
|
||||
"line_number": 154
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/grafana/grafana.ini",
|
||||
"filename": "core/metrics/grafana/grafana.ini",
|
||||
"hashed_secret": "5718bce97710e6be87ea160b36eaefb5032857d3",
|
||||
"is_verified": false,
|
||||
"line_number": 239,
|
||||
"is_secret": false
|
||||
"line_number": 239
|
||||
},
|
||||
{
|
||||
"type": "Secret Keyword",
|
||||
"filename": "core/grafana/grafana.ini",
|
||||
"filename": "core/metrics/grafana/grafana.ini",
|
||||
"hashed_secret": "10aed9d7ebef778a9b3033dba3f7813b639e0d50",
|
||||
"is_verified": false,
|
||||
"line_number": 252,
|
||||
"is_secret": false
|
||||
"line_number": 252
|
||||
}
|
||||
]
|
||||
},
|
||||
"generated_at": "2024-08-30T18:12:43Z"
|
||||
"generated_at": "2023-07-11T19:43:38Z"
|
||||
}
|
||||
|
45 .terraform.lock.hcl generated
@ -2,39 +2,20 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.2.0"
|
||||
version = "1.4.20"
|
||||
hashes = [
|
||||
"h1:BAjqzVkuXxHtRKG+l9unaZJPk2kWZpSTCEcQPRcl2so=",
|
||||
"zh:052f909d25121e93dc799290216292fca67943ccde12ba515068b838a6ff8c66",
|
||||
"zh:20e29aeb9989f7a1e04bb4093817c7acc4e1e737bb21a3066f3ea46f2001feff",
|
||||
"zh:2326d101ef427599b72cce30c0e0c1d18ae783f1a897c20f2319fbf54bab0a61",
|
||||
"zh:3420cbe4fd19cdc96d715d0ae8e79c272608023a76033bbf582c30637f6d570f",
|
||||
"zh:41ec570f87f578f1c57655e2e4fbdb9932d94cf92dc9cd11828cccedf36dd4a4",
|
||||
"zh:5f90dcc58e3356ffead82ea211ecb4a2d7094d3c2fbd14ff85527c3652a595a2",
|
||||
"zh:64aaa48609d2db868fcfd347490df0e12c6c3fcb8e4f12908c5d52b1a0adf73f",
|
||||
"h1:M/QVXHPfeySejJZI3I8mBYrL/J9VsbnyF/dKIMlUhXo=",
|
||||
"zh:02989edcebe724fc0aa873b22176fd20074c4f46295e728010711a8fc5dfa72c",
|
||||
"zh:089ba7d19bcf5c6bab3f8b8c5920eb6d78c52cf79bb0c5dfeb411c600e7efcba",
|
||||
"zh:235865a2182ca372bcbf440201a8b8cc0715ad5dbc4de893d99b6f32b5be53ab",
|
||||
"zh:67ea718764f3f344ecc6e027d20c1327b86353c8064aa90da3ec12cec4a88954",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:86b4923e10e6ba407d1d2aab83740b702058e8b01460af4f5f0e4008f40e492c",
|
||||
"zh:ae89dcba33097af33a306344d20e4e25181f15dcc1a860b42db5b7199a97c6a6",
|
||||
"zh:ce56d68cdfba60891765e94f9c0bf69eddb985d44d97db9f91874bea027f08e2",
|
||||
"zh:e993bcde5dbddaedf3331e3014ffab904f98ab0f5e8b5d6082b7ca5083e0a2f1",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.6.0"
|
||||
hashes = [
|
||||
"h1:R5Ucn26riKIEijcsiOMBR3uOAjuOMfI1x7XvH4P6B1w=",
|
||||
"zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d",
|
||||
"zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211",
|
||||
"zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829",
|
||||
"zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d",
|
||||
"zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17",
|
||||
"zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21",
|
||||
"zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839",
|
||||
"zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0",
|
||||
"zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c",
|
||||
"zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e",
|
||||
"zh:8c68c540f0df4980568bdd688c2adec86eda62eb2de154e3db215b16de0a7ae0",
|
||||
"zh:911969c63a69a733be57b96d54c5966c9424e1abec8d5f20038c8cef3a504c65",
|
||||
"zh:a673c92ddc9d47e8d53dcb9b376f1adcb4543488202fc83a3e7eab8677530684",
|
||||
"zh:a94a73eae89fd8c8ebf872013079be41161d3f293f4026c92d45c4c5667dd613",
|
||||
"zh:db6b89f8b696040c0344f00928e4cf6e0a75034421ba14cdcd8a4d23bc865dce",
|
||||
"zh:e512c0b1239e3d66b60d22c2b4de19fea288e492cde90dff9277cc475fd9dbbf",
|
||||
"zh:ef6eccecbdef3bb8ce629cabfb5550c1db5c3e952943dda1786ef6cb470a8c23",
|
||||
]
|
||||
}
|
||||
|
12 Makefile
@ -62,7 +62,7 @@ ansible-cluster: $(VENV) ansible_galaxy
bootstrap-values: $(VENV)
	env NOMAD_ADDR=http://192.168.2.101:4646 \
		NOMAD_TOKEN=$(shell jq -r .SecretID nomad_bootstrap.json) \
		$(VENV)/bin/python ./scripts/nomad_vars.py
		$(VENV)/bin/python ./nomad_vars.py

.PHONY: recover-nomad
recover-nomad: $(VENV)
@ -87,16 +87,6 @@ apply:
		-auto-approve \
		-var "nomad_secret_id=$(shell jq -r .SecretID nomad_bootstrap.json)" \

.PHONY: refresh
refresh:
	@terraform refresh \
		-var "nomad_secret_id=$(shell jq -r .SecretID nomad_bootstrap.json)" \

.PHONY: destroy
destroy:
	@terraform destroy \
		-var "nomad_secret_id=$(shell jq -r .SecretID nomad_bootstrap.json)" \

.PHONY: clean
clean:
	env VIRTUAL_ENV=$(VENV) $(VENV)/bin/ansible-playbook -vv \
27 README.md
@ -2,8 +2,6 @@

My configuration for creating my home Nomad cluster and deploying services to it.

This repo is not designed as general purpose templates, but rather to fit my specific needs. That said, I have made an effort for things to be as useful as possible for someone wanting to use or modify this.

## Running

    make all
@ -14,33 +12,10 @@ Both Ansible and Terraform are used as part of this configuration. All hosts mus

To begin, Ansible runs a playbook to setup the cluster. This includes installing Nomad, bootstrapping the cluster and ACLs, setting up NFS shares, creating Nomad Host Volumes, and setting up Wesher as a Wireguard mesh between hosts.

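In practice this is driven by the `ansible-cluster` make target; a rough sketch of the equivalent direct invocation (assuming the virtualenv in `venv/` already has Ansible installed and the default inventory and playbook paths are used):

    # Run the cluster setup playbook against this repo's inventory
    venv/bin/ansible-playbook -vv \
        -i ansible_playbooks/ansible_hosts.yml \
        ansible_playbooks/setup-cluster.yml
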
After this is complete, Nomad variables must be set for services to access and configure correctly. This depends on variables to be set based on the sample file.
After this is complete, Ansible variables must be set for services to access and configure correctly. This depends on variables to be set based on the sample file.

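Loading the values is handled by the `bootstrap-values` make target; a minimal sketch of what it runs underneath (assuming the bootstrap token was saved to `nomad_bootstrap.json` and the variables file was created from the sample):

    # Authenticate with the bootstrap token and push variables to the cluster
    env NOMAD_ADDR=http://192.168.2.101:4646 \
        NOMAD_TOKEN=$(jq -r .SecretID nomad_bootstrap.json) \
        venv/bin/python ./scripts/nomad_vars.py
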
Finally, the Terraform configuration can be applied setting up all services deployed on the cluster.

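This is the `apply` make target, which boils down to roughly the following (a sketch, again assuming the bootstrap token file is present):

    # Apply all Terraform-managed services, passing the management token to the Nomad provider
    terraform apply \
        -auto-approve \
        -var "nomad_secret_id=$(jq -r .SecretID nomad_bootstrap.json)"
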
The configuration of new services is intended to be as templated as possible and to avoid requiring changes in multiple places. For example, most services are configured with a template that provides reverse proxy, DNS records, database tunnels, database bootstrapping, metrics scraping, and authentication. The only real exception is backups, which requires a distinct job file, for now.

## What does it do?

* Nomad cluster for scheduling and configuring all services
* Blocky DNS servers with integrated ad blocking. This also provides service discovery
* Prometheus with autodiscovery of service metrics
* Loki and Promtail aggregating logs
* Minitor for service availability checks
* Grafana providing dashboards, alerting, and log searching
* Photoprism for photo management
* Remote and shared volumes over NFS
* Authelia for OIDC and Proxy based authentication with 2FA
* Sonarr and Lidarr for multimedia management
* Automated block based backups using Restic

## Step by step

1. Update hosts in `ansible_playbooks/ansible_hosts.yml`
2. Update `ansible_playbook/setup-cluster.yml`
    1. Update backup DNS server
    2. Update NFS shares from NAS
    3. Update volumes to make sure they are valid paths
3. Create `ansible_playbooks/vars/nomad_vars.yml` based on the sample file. TODO: This is quite specific and probably impossible without more documentation
4. Run `make all`
5. Update your network DNS settings to use the new servers IP addresses
@ -1,6 +1,6 @@
resource "nomad_acl_policy" "anon_policy" {
  name        = "anonymous"
  description = "Anon read only"
  description = "Anon RO"
  rules_hcl   = file("${path.module}/nomad-anon-policy.hcl")
}
@ -1,72 +1,68 @@
|
||||
---
|
||||
all:
|
||||
hosts:
|
||||
n1.thefij:
|
||||
nomad_node_class: ingress
|
||||
nomad_reserved_memory: 1024
|
||||
# nomad_meta:
|
||||
# hw_transcode.device: /dev/dri
|
||||
# hw_transcode.type: intel
|
||||
nfs_mounts:
|
||||
- src: 10.50.250.2:/srv/volumes
|
||||
path: /srv/volumes/moxy
|
||||
opts: proto=tcp,rw
|
||||
nomad_unique_host_volumes:
|
||||
- name: mysql-data
|
||||
path: /srv/volumes/mysql
|
||||
owner: "999"
|
||||
group: "100"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
- name: postgres-data
|
||||
path: /srv/volumes/postgres
|
||||
owner: "999"
|
||||
group: "999"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
# n2.thefij:
|
||||
# nomad_node_class: ingress
|
||||
# nomad_reserved_memory: 1024
|
||||
# nfs_mounts:
|
||||
# - src: 10.50.250.2:/srv/volumes
|
||||
# path: /srv/volumes/moxy
|
||||
# opts: proto=tcp,rw
|
||||
# nomad_unique_host_volumes:
|
||||
# - name: nextcloud-data
|
||||
# path: /srv/volumes/nextcloud
|
||||
# owner: "root"
|
||||
# group: "bin"
|
||||
# mode: "0755"
|
||||
# read_only: false
|
||||
pi4:
|
||||
nomad_node_class: ingress
|
||||
nomad_reserved_memory: 512
|
||||
nomad_meta:
|
||||
hw_transcode.device: /dev/video11
|
||||
hw_transcode.type: raspberry
|
||||
qnomad.thefij:
|
||||
ansible_host: 192.168.2.234
|
||||
nomad_reserved_memory: 1024
|
||||
# This VM uses a non-standard interface
|
||||
nomad_network_interface: ens3
|
||||
|
||||
nomad_instances:
|
||||
vars:
|
||||
nomad_network_interface: eth0
|
||||
children:
|
||||
nomad_servers: {}
|
||||
nomad_clients: {}
|
||||
nomad_servers:
|
||||
hosts:
|
||||
nonopi.thefij:
|
||||
ansible_host: 192.168.2.170
|
||||
n1.thefij: {}
|
||||
# n2.thefij: {}
|
||||
pi4: {}
|
||||
# qnomad.thefij: {}
|
||||
nomad_clients:
|
||||
hosts:
|
||||
n1.thefij: {}
|
||||
# n2.thefij: {}
|
||||
pi4: {}
|
||||
# qnomad.thefij: {}
|
||||
servers:
|
||||
hosts:
|
||||
n1.thefij:
|
||||
nomad_node_role: both
|
||||
# nomad_meta:
|
||||
# hw_transcode.device: /dev/dri
|
||||
# hw_transcode.type: intel
|
||||
nfs_mounts:
|
||||
- src: 10.50.250.2:/srv/volumes
|
||||
path: /srv/volumes/moxy
|
||||
opts: proto=tcp,rw
|
||||
nomad_unique_host_volumes:
|
||||
- name: mysql-data
|
||||
path: /srv/volumes/mysql
|
||||
owner: "999"
|
||||
group: "100"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
- name: postgres-data
|
||||
path: /srv/volumes/postgres
|
||||
owner: "999"
|
||||
group: "999"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
n2.thefij:
|
||||
nfs_mounts:
|
||||
- src: 10.50.250.2:/srv/volumes
|
||||
path: /srv/volumes/moxy
|
||||
opts: proto=tcp,rw
|
||||
nomad_node_class: ingress
|
||||
nomad_node_role: both
|
||||
nomad_unique_host_volumes:
|
||||
- name: nextcloud-data
|
||||
path: /srv/volumes/nextcloud
|
||||
owner: "root"
|
||||
group: "bin"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
- name: gitea-data
|
||||
path: /srv/volumes/gitea
|
||||
owner: "root"
|
||||
group: "bin"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
- name: sonarr-data
|
||||
path: /srv/volumes/sonarr
|
||||
owner: "root"
|
||||
group: "bin"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
pi4:
|
||||
nomad_node_role: both
|
||||
nomad_meta:
|
||||
hw_transcode.device: /dev/video11
|
||||
hw_transcode.type: raspberry
|
||||
|
||||
consul_instances:
|
||||
children:
|
||||
servers: {}
|
||||
vault_instances:
|
||||
children:
|
||||
servers: {}
|
||||
nomad_instances:
|
||||
children:
|
||||
servers: {}
|
||||
|
80 ansible_playbooks/bootstrap-values.yml Normal file
@ -0,0 +1,80 @@
|
||||
---
|
||||
- name: Bootstrap Consul values
|
||||
hosts: consul_instances
|
||||
gather_facts: false
|
||||
|
||||
vars_files:
|
||||
- vars/consul_values.yml
|
||||
|
||||
tasks:
|
||||
- name: Add values
|
||||
delegate_to: localhost
|
||||
run_once: true
|
||||
block:
|
||||
- name: Install python-consul
|
||||
pip:
|
||||
name: python-consul
|
||||
extra_args: --index-url https://pypi.org/simple
|
||||
|
||||
- name: Write values
|
||||
consul_kv:
|
||||
host: "{{ inventory_hostname }}"
|
||||
key: "{{ item.key }}"
|
||||
value: "{{ item.value }}"
|
||||
loop: "{{ consul_values | default({}) | dict2items }}"
|
||||
|
||||
- name: Bootstrap value values
|
||||
hosts: vault_instances
|
||||
gather_facts: false
|
||||
|
||||
vars_files:
|
||||
- vars/vault_hashi_vault_values.yml
|
||||
|
||||
tasks:
|
||||
- name: Bootstrap Vault secrets
|
||||
delegate_to: localhost
|
||||
run_once: true
|
||||
block:
|
||||
- name: Install hvac
|
||||
pip:
|
||||
name: hvac
|
||||
extra_args: --index-url https://pypi.org/simple
|
||||
|
||||
- name: Check mount
|
||||
community.hashi_vault.vault_read:
|
||||
url: "http://{{ inventory_hostname }}:8200"
|
||||
token: "{{ root_token }}"
|
||||
path: "/sys/mounts/kv"
|
||||
ignore_errors: true
|
||||
register: check_mount
|
||||
|
||||
- name: Create kv mount
|
||||
community.hashi_vault.vault_write:
|
||||
url: "http://{{ inventory_hostname }}:8200"
|
||||
token: "{{ root_token }}"
|
||||
path: "/sys/mounts/kv"
|
||||
data:
|
||||
type: kv-v2
|
||||
when: check_mount is not succeeded
|
||||
|
||||
- name: Write values
|
||||
no_log: true
|
||||
community.hashi_vault.vault_write:
|
||||
url: "http://{{ inventory_hostname }}:8200"
|
||||
token: "{{ root_token }}"
|
||||
path: "kv/data/{{ item.key }}"
|
||||
data:
|
||||
data:
|
||||
"{{ item.value }}"
|
||||
loop: "{{ hashi_vault_values | default({}) | dict2items }}"
|
||||
retries: 2
|
||||
delay: 10
|
||||
|
||||
- name: Write userpass
|
||||
no_log: true
|
||||
community.hashi_vault.vault_write:
|
||||
url: "http://{{ inventory_hostname }}:8200"
|
||||
token: "{{ root_token }}"
|
||||
path: "auth/userpass/users/{{ item.name }}"
|
||||
data: '{"password": "{{ item.password }}", "policies": "{{ item.policies }}"}'
|
||||
loop: "{{ vault_userpass }}"
|
@ -1,5 +1,27 @@
# Stops Nomad and clears all data from its ata dirs
# Stops Consul, Vault, and Nomad and clears all data from their data dirs
---
- name: Delete Consul data
  hosts: consul_instances

  tasks:
    - name: Stop consul
      systemd:
        name: consul
        state: stopped
      become: true

    - name: Stop vault
      systemd:
        name: vault
        state: stopped
      become: true

    - name: Remove data dir
      file:
        path: /opt/consul
        state: absent
      become: true

- name: Delete Nomad data
  hosts: nomad_instances
@ -14,14 +14,8 @@
        state: restarted
      become: true

    - name: Start Docker
      systemd:
        name: docker
        state: started
      become: true

    - name: Start Nomad
      systemd:
        name: nomad
        state: started
        state: stopped
      become: true
88 ansible_playbooks/recover-consul.yaml Normal file
@ -0,0 +1,88 @@
|
||||
---
|
||||
- name: Stop Nomad
|
||||
hosts: nomad_instances
|
||||
|
||||
tasks:
|
||||
- name: Stop Nomad
|
||||
systemd:
|
||||
name: nomad
|
||||
state: stopped
|
||||
become: true
|
||||
|
||||
- name: Stop Vault
|
||||
hosts: vault_instances
|
||||
gather_facts: false
|
||||
|
||||
tasks:
|
||||
- name: Stop Vault
|
||||
systemd:
|
||||
name: vault
|
||||
state: stopped
|
||||
become: true
|
||||
|
||||
- name: Recover Consul
|
||||
hosts: consul_instances
|
||||
gather_facts: false
|
||||
|
||||
tasks:
|
||||
- name: Stop Consul
|
||||
systemd:
|
||||
name: consul
|
||||
state: stopped
|
||||
become: true
|
||||
|
||||
- name: Get node-id
|
||||
slurp:
|
||||
src: /opt/consul/node-id
|
||||
register: consul_node_id
|
||||
become: true
|
||||
|
||||
- name: Node Info
|
||||
debug:
|
||||
msg: |
|
||||
node_id: {{ consul_node_id.content | b64decode }}
|
||||
address: {{ ansible_default_ipv4.address }}
|
||||
|
||||
- name: Save
|
||||
copy:
|
||||
dest: "/opt/consul/raft/peers.json"
|
||||
# I used to have reject('equalto', inventory_hostname) in the loop, but I'm not sure if I should
|
||||
content: |
|
||||
[
|
||||
{% for host in ansible_play_hosts -%}
|
||||
{
|
||||
"id": "{{ hostvars[host].consul_node_id.content | b64decode }}",
|
||||
"address": "{{ hostvars[host].ansible_default_ipv4.address }}:8300",
|
||||
"non_voter": false
|
||||
}{% if not loop.last %},{% endif %}
|
||||
{% endfor -%}
|
||||
]
|
||||
become: true
|
||||
|
||||
- name: Restart Consul
|
||||
systemd:
|
||||
name: consul
|
||||
state: restarted
|
||||
become: true
|
||||
|
||||
- name: Start Vault
|
||||
hosts: vault_instances
|
||||
gather_facts: false
|
||||
|
||||
tasks:
|
||||
- name: Start Vault
|
||||
systemd:
|
||||
name: vault
|
||||
state: started
|
||||
become: true
|
||||
|
||||
- name: Start Nomad
|
||||
hosts: nomad_instances
|
||||
gather_facts: false
|
||||
|
||||
tasks:
|
||||
- name: Start Nomad
|
||||
systemd:
|
||||
name: nomad
|
||||
state: started
|
||||
become: true
|
@ -1,7 +1,6 @@
---
- name: Recover Nomad
  hosts: nomad_servers
  any_errors_fatal: true
  hosts: nomad_instances

  tasks:
    - name: Stop Nomad
@ -10,10 +9,6 @@
        state: stopped
      become: true

    - name: Remount all shares
      command: mount -a
      become: true

    - name: Get node-id
      slurp:
        src: /var/nomad/server/node-id
@ -1,6 +1,6 @@
|
||||
---
|
||||
- name: Update DNS for bootstrapping with non-Nomad host
|
||||
hosts: nomad_instances
|
||||
hosts: consul_instances
|
||||
become: true
|
||||
gather_facts: false
|
||||
vars:
|
||||
@ -14,7 +14,7 @@
|
||||
line: "nameserver {{ non_nomad_dns }}"
|
||||
|
||||
- name: Install Docker
|
||||
hosts: nomad_clients
|
||||
hosts: nomad_instances
|
||||
become: true
|
||||
vars:
|
||||
docker_architecture_map:
|
||||
@ -44,7 +44,7 @@
|
||||
# state: present
|
||||
|
||||
- name: Create NFS mounts
|
||||
hosts: nomad_clients
|
||||
hosts: nomad_instances
|
||||
become: true
|
||||
vars:
|
||||
shared_nfs_mounts:
|
||||
@ -56,10 +56,6 @@
|
||||
path: /srv/volumes/media-write
|
||||
opts: proto=tcp,port=2049,rw
|
||||
|
||||
- src: 192.168.2.10:/Overflow
|
||||
path: /srv/volumes/nas-overflow
|
||||
opts: proto=tcp,port=2049,rw
|
||||
|
||||
- src: 192.168.2.10:/Photos
|
||||
path: /srv/volumes/photos
|
||||
opts: proto=tcp,port=2049,rw
|
||||
@ -101,12 +97,6 @@
|
||||
group: "root"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
- name: media-overflow-write
|
||||
path: /srv/volumes/nas-overflow/Media
|
||||
owner: "root"
|
||||
group: "root"
|
||||
mode: "0755"
|
||||
read_only: false
|
||||
- name: media-downloads
|
||||
path: /srv/volumes/media-write/Downloads
|
||||
read_only: false
|
||||
@ -122,26 +112,8 @@
|
||||
- name: nzbget-config
|
||||
path: /srv/volumes/nas-container/nzbget
|
||||
read_only: false
|
||||
- name: sonarr-config
|
||||
path: /srv/volumes/nas-container/sonarr
|
||||
read_only: false
|
||||
- name: lidarr-config
|
||||
path: /srv/volumes/nas-container/lidarr
|
||||
read_only: false
|
||||
- name: radarr-config
|
||||
path: /srv/volumes/nas-container/radarr
|
||||
read_only: false
|
||||
- name: bazarr-config
|
||||
path: /srv/volumes/nas-container/bazarr
|
||||
read_only: false
|
||||
- name: gitea-data
|
||||
path: /srv/volumes/nas-container/gitea
|
||||
read_only: false
|
||||
- name: ytdl-web
|
||||
path: /srv/volumes/nas-container/ytdl-web
|
||||
read_only: false
|
||||
- name: christmas-community
|
||||
path: /srv/volumes/nas-container/christmas-community
|
||||
- name: tubesync-config
|
||||
path: /srv/volumes/nas-container/tubesync
|
||||
read_only: false
|
||||
- name: all-volumes
|
||||
path: /srv/volumes
|
||||
@ -153,10 +125,9 @@
|
||||
roles:
|
||||
- name: ansible-nomad
|
||||
vars:
|
||||
nomad_version: "1.9.3-1"
|
||||
nomad_version: "1.6.0-1"
|
||||
nomad_install_upgrade: true
|
||||
nomad_allow_purge_config: true
|
||||
nomad_node_role: "{% if 'nomad_clients' in group_names %}{% if 'nomad_servers' in group_names %}both{% else %}client{% endif %}{% else %}server{% endif %}"
|
||||
|
||||
# Where nomad gets installed to
|
||||
nomad_bin_dir: /usr/bin
|
||||
@ -210,8 +181,7 @@
|
||||
nomad_bind_address: 0.0.0.0
|
||||
|
||||
# Default interface for binding tasks
|
||||
# This is now set at the inventory level
|
||||
# nomad_network_interface: eth0
|
||||
nomad_network_interface: eth0
|
||||
|
||||
# Create networks for binding task ports
|
||||
nomad_host_networks:
|
||||
@ -230,7 +200,7 @@
|
||||
enabled: true
|
||||
|
||||
- name: Bootstrap Nomad ACLs and scheduler
|
||||
hosts: nomad_servers
|
||||
hosts: nomad_instances
|
||||
|
||||
tasks:
|
||||
- name: Start Nomad
|
||||
@ -260,7 +230,6 @@
|
||||
run_once: true
|
||||
ignore_errors: true
|
||||
register: bootstrap_result
|
||||
changed_when: bootstrap_result is succeeded
|
||||
|
||||
- name: Save bootstrap result
|
||||
copy:
|
||||
@ -292,15 +261,13 @@
|
||||
- list
|
||||
environment:
|
||||
NOMAD_TOKEN: "{{ read_secretid.stdout }}"
|
||||
register: policies
|
||||
run_once: true
|
||||
changed_when: false
|
||||
register: policies
|
||||
|
||||
- name: Copy policy
|
||||
copy:
|
||||
src: ../acls/nomad-anon-policy.hcl
|
||||
dest: /tmp/anonymous.policy.hcl
|
||||
delegate_to: "{{ play_hosts[0] }}"
|
||||
run_once: true
|
||||
register: anon_policy
|
||||
|
||||
@ -311,7 +278,7 @@
|
||||
- acl
|
||||
- policy
|
||||
- apply
|
||||
- -description=Anon read only
|
||||
- -description="Anon read only"
|
||||
- anonymous
|
||||
- /tmp/anonymous.policy.hcl
|
||||
environment:
|
||||
@ -320,18 +287,6 @@
|
||||
delegate_to: "{{ play_hosts[0] }}"
|
||||
run_once: true
|
||||
|
||||
- name: Read scheduler config
|
||||
command:
|
||||
argv:
|
||||
- nomad
|
||||
- operator
|
||||
- scheduler
|
||||
- get-config
|
||||
- -json
|
||||
run_once: true
|
||||
register: scheduler_config
|
||||
changed_when: false
|
||||
|
||||
- name: Enable service scheduler preemption
|
||||
command:
|
||||
argv:
|
||||
@ -339,24 +294,12 @@
|
||||
- operator
|
||||
- scheduler
|
||||
- set-config
|
||||
- -preempt-system-scheduler=true
|
||||
- -preempt-service-scheduler=true
|
||||
environment:
|
||||
NOMAD_TOKEN: "{{ read_secretid.stdout }}"
|
||||
delegate_to: "{{ play_hosts[0] }}"
|
||||
run_once: true
|
||||
when: (scheduler_config.stdout | from_json)["SchedulerConfig"]["PreemptionConfig"]["ServiceSchedulerEnabled"] is false
|
||||
|
||||
- name: Enable system scheduler preemption
|
||||
command:
|
||||
argv:
|
||||
- nomad
|
||||
- operator
|
||||
- scheduler
|
||||
- set-config
|
||||
- -preempt-system-scheduler=true
|
||||
environment:
|
||||
NOMAD_TOKEN: "{{ read_secretid.stdout }}"
|
||||
run_once: true
|
||||
when: (scheduler_config.stdout | from_json)["SchedulerConfig"]["PreemptionConfig"]["SystemSchedulerEnabled"] is false
|
||||
|
||||
# - name: Set up Nomad backend and roles in Vault
|
||||
# community.general.terraform:
|
||||
|
27 ansible_playbooks/unseal-vault.yml Normal file
@ -0,0 +1,27 @@
---
- name: Unseal Vault
  hosts: vault_instances

  tasks:
    - name: Get Vault status
      uri:
        url: http://127.0.0.1:8200/v1/sys/health
        method: GET
        status_code: 200, 429, 472, 473, 501, 503
        body_format: json
        return_content: true
      register: vault_status

    - name: Unseal Vault
      no_log: true
      command:
        argv:
          - "vault"
          - "operator"
          - "unseal"
          - "-address=http://127.0.0.1:8200/"
          - "{{ item }}"
      loop: "{{ unseal_keys_hex }}"
      when:
        - unseal_keys_hex is defined
        - vault_status.json["sealed"]
@ -2,53 +2,56 @@ nomad/jobs:
|
||||
base_hostname: VALUE
|
||||
db_user_ro: VALUE
|
||||
ldap_base_dn: VALUE
|
||||
mysql_root_password: VALUE
|
||||
notify_email: VALUE
|
||||
smtp_password: VALUE
|
||||
smtp_port: VALUE
|
||||
smtp_server: VALUE
|
||||
smtp_tls: VALUE
|
||||
smtp_user: VALUE
|
||||
nomad/jobs/adminer:
|
||||
mysql_stunnel_psk: VALUE
|
||||
nomad/jobs/authelia:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
email_sender: VALUE
|
||||
jwt_secret: VALUE
|
||||
ldap_stunnel_psk: VALUE
|
||||
lldap_admin_password: VALUE
|
||||
lldap_admin_user: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
oidc_clients: VALUE
|
||||
oidc_hmac_secret: VALUE
|
||||
oidc_issuer_certificate_chain: VALUE
|
||||
oidc_issuer_private_key: VALUE
|
||||
redis_stunnel_psk: VALUE
|
||||
session_secret: VALUE
|
||||
storage_encryption_key: VALUE
|
||||
nomad/jobs/authelia/authelia/stunnel:
|
||||
redis_stunnel_psk: VALUE
|
||||
nomad/jobs/backup:
|
||||
backup_passphrase: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
nas_ftp_host: VALUE
|
||||
nas_ftp_pass: VALUE
|
||||
nas_ftp_user: VALUE
|
||||
nas_minio_access_key_id: VALUE
|
||||
nas_minio_secret_access_key: VALUE
|
||||
nomad/jobs/backup-oneoff-n1:
|
||||
backup_passphrase: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
nas_ftp_host: VALUE
|
||||
nas_ftp_pass: VALUE
|
||||
nas_ftp_user: VALUE
|
||||
nas_minio_access_key_id: VALUE
|
||||
nas_minio_secret_access_key: VALUE
|
||||
nomad/jobs/backup-oneoff-n2:
|
||||
backup_passphrase: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
nas_ftp_host: VALUE
|
||||
nas_ftp_pass: VALUE
|
||||
nas_ftp_user: VALUE
|
||||
nas_minio_access_key_id: VALUE
|
||||
nas_minio_secret_access_key: VALUE
|
||||
nomad/jobs/backup-oneoff-pi4:
|
||||
backup_passphrase: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
nas_ftp_host: VALUE
|
||||
nas_ftp_pass: VALUE
|
||||
nas_ftp_user: VALUE
|
||||
nas_minio_access_key_id: VALUE
|
||||
nas_minio_secret_access_key: VALUE
|
||||
nomad/jobs/bazarr:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/blocky:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
@ -56,6 +59,7 @@ nomad/jobs/blocky:
|
||||
mappings: VALUE
|
||||
whitelists_ads: VALUE
|
||||
nomad/jobs/blocky/blocky/stunnel:
|
||||
mysql_stunnel_psk: VALUE
|
||||
redis_stunnel_psk: VALUE
|
||||
nomad/jobs/ddclient:
|
||||
domain: VALUE
|
||||
@ -63,13 +67,11 @@ nomad/jobs/ddclient:
|
||||
zone: VALUE
|
||||
nomad/jobs/diun:
|
||||
slack_hook_url: VALUE
|
||||
nomad/jobs/git:
|
||||
nomad/jobs/gitea:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
oidc_secret: VALUE
|
||||
secret_key: VALUE
|
||||
smtp_sender: VALUE
|
||||
nomad/jobs/grafana:
|
||||
admin_pw: VALUE
|
||||
alert_email_addresses: VALUE
|
||||
@ -86,19 +88,34 @@ nomad/jobs/grafana:
|
||||
slack_hook_url: VALUE
|
||||
smtp_password: VALUE
|
||||
smtp_user: VALUE
|
||||
nomad/jobs/grafana/grafana/stunnel:
|
||||
mysql_stunnel_psk: VALUE
|
||||
nomad/jobs/immich:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/lego:
|
||||
acme_email: VALUE
|
||||
domain_lego_dns: VALUE
|
||||
usersfile: VALUE
|
||||
nomad/jobs/lidarr:
|
||||
db_name: VALUE
|
||||
nomad/jobs/ipdvr/bazarr:
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/ipdvr/bazarr/bootstrap:
|
||||
superuser: VALUE
|
||||
superuser_pass: VALUE
|
||||
nomad/jobs/ipdvr/lidarr:
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/ipdvr/lidarr/bootstrap:
|
||||
superuser: VALUE
|
||||
superuser_pass: VALUE
|
||||
nomad/jobs/ipdvr/radarr:
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/ipdvr/radarr/bootstrap:
|
||||
superuser: VALUE
|
||||
superuser_pass: VALUE
|
||||
nomad/jobs/lldap:
|
||||
admin_email: VALUE
|
||||
admin_password: VALUE
|
||||
admin_user: VALUE
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
@ -106,48 +123,38 @@ nomad/jobs/lldap:
|
||||
key_seed: VALUE
|
||||
smtp_from: VALUE
|
||||
smtp_reply_to: VALUE
|
||||
nomad/jobs/lldap/lldap/bootstrap:
|
||||
mysql_root_password: VALUE
|
||||
nomad/jobs/lldap/lldap/stunnel:
|
||||
allowed_psks: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
nomad/jobs/minitor:
|
||||
mailgun_api_key: VALUE
|
||||
nomad/jobs/mysql-server:
|
||||
mysql_root_password: VALUE
|
||||
allowed_psks: VALUE
|
||||
root_password: VALUE
|
||||
nomad/jobs/photoprism:
|
||||
admin_password: VALUE
|
||||
admin_user: VALUE
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
oidc_secret: VALUE
|
||||
mysql_stunnel_psk: VALUE
|
||||
nomad/jobs/postgres-server:
|
||||
superuser: VALUE
|
||||
superuser_pass: VALUE
|
||||
nomad/jobs/radarr:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/redis-authelia:
|
||||
allowed_psks: VALUE
|
||||
nomad/jobs/redis-blocky:
|
||||
allowed_psks: VALUE
|
||||
nomad/jobs/rediscommander:
|
||||
redis_stunnel_psk: VALUE
|
||||
nomad/jobs/sonarr:
|
||||
db_name: VALUE
|
||||
db_pass: VALUE
|
||||
db_user: VALUE
|
||||
nomad/jobs/traefik:
|
||||
external: VALUE
|
||||
acme_email: VALUE
|
||||
domain_lego_dns: VALUE
|
||||
usersfile: VALUE
|
||||
nomad/jobs/unifi-traffic-route-ips:
|
||||
unifi_password: VALUE
|
||||
unifi_username: VALUE
|
||||
nomad/jobs/wishlist:
|
||||
guest_password: VALUE
|
||||
nomad/oidc:
|
||||
secret: VALUE
|
||||
secrets/ldap:
|
||||
admin_email: VALUE
|
||||
admin_password: VALUE
|
||||
admin_user: VALUE
|
||||
secrets/mysql:
|
||||
mysql_root_password: VALUE
|
||||
secrets/postgres:
|
||||
@ -159,4 +166,3 @@ secrets/smtp:
|
||||
server: VALUE
|
||||
tls: VALUE
|
||||
user: VALUE
|
||||
|
||||
|
40 backups/.terraform.lock.hcl generated
@ -1,40 +0,0 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.0.0"
|
||||
hashes = [
|
||||
"h1:lIHIxA6ZmfyTGL3J9YIddhxlfit4ipSS09BLxkwo6L0=",
|
||||
"zh:09b897d64db293f9a904a4a0849b11ec1e3fff5c638f734d82ae36d8dc044b72",
|
||||
"zh:435cc106799290f64078ec24b6c59cb32b33784d609088638ed32c6d12121199",
|
||||
"zh:7073444bd064e8c4ec115ca7d9d7f030cc56795c0a83c27f6668bba519e6849a",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:79d238c35d650d2d83a439716182da63f3b2767e72e4cbd0b69cb13d9b1aebfc",
|
||||
"zh:7ef5f49344278fe0bbc5447424e6aa5425ff1821d010d944a444d7fa2c751acf",
|
||||
"zh:92179091638c8ba03feef371c4361a790190f9955caea1fa59de2055c701a251",
|
||||
"zh:a8a34398851761368eb8e7c171f24e55efa6e9fdbb5c455f6dec34dc17f631bc",
|
||||
"zh:b38fd5338625ebace5a4a94cea1a28b11bd91995d834e318f47587cfaf6ec599",
|
||||
"zh:b71b273a2aca7ad5f1e07c767b25b5a888881ba9ca93b30044ccc39c2937f03c",
|
||||
"zh:cd14357e520e0f09fb25badfb4f2ee37d7741afdc3ed47c7bcf54c1683772543",
|
||||
"zh:e05e025f4bb95138c3c8a75c636e97cd7cfd2fc1525b0c8bd097db8c5f02df6e",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.5.1"
|
||||
hashes = [
|
||||
"h1:VSnd9ZIPyfKHOObuQCaKfnjIHRtR7qTw19Rz8tJxm+k=",
|
||||
"zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64",
|
||||
"zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d",
|
||||
"zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831",
|
||||
"zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3",
|
||||
"zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b",
|
||||
"zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2",
|
||||
"zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865",
|
||||
"zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03",
|
||||
"zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602",
|
||||
"zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014",
|
||||
]
|
||||
}
|
@ -1,136 +0,0 @@
|
||||
resource "nomad_job" "backup" {
|
||||
jobspec = templatefile("${path.module}/backup.nomad", {
|
||||
module_path = path.module,
|
||||
batch_node = null,
|
||||
use_wesher = var.use_wesher
|
||||
})
|
||||
}
|
||||
|
||||
resource "nomad_job" "backup-oneoff" {
|
||||
# TODO: Get list of nomad hosts dynamically
|
||||
for_each = toset(["n1", "pi4"])
|
||||
# for_each = toset([
|
||||
# for node in data.consul_service.nomad.service :
|
||||
# node.node_name
|
||||
# ])
|
||||
|
||||
jobspec = templatefile("${path.module}/backup.nomad", {
|
||||
module_path = path.module,
|
||||
batch_node = each.key,
|
||||
use_wesher = var.use_wesher
|
||||
})
|
||||
}
|
||||
|
||||
locals {
|
||||
# NOTE: This can't be dynamic in first deploy since these values are not known
|
||||
# all_job_ids = toset(flatten([[for job in resource.nomad_job.backup-oneoff : job.id], [resource.nomad_job.backup.id]]))
|
||||
all_job_ids = toset(["backup", "backup-oneoff-n1", "backup-oneoff-pi4"])
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "secrets_mysql" {
|
||||
for_each = local.all_job_ids
|
||||
|
||||
name = "${each.key}-secrets-mysql"
|
||||
description = "Give access to MySQL secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = each.key
|
||||
}
|
||||
}
|
||||
|
||||
resource "random_password" "mysql_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "mysql_psk" {
|
||||
path = "secrets/mysql/allowed_psks/backups"
|
||||
items = {
|
||||
psk = "backups:${resource.random_password.mysql_psk.result}"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "mysql_psk" {
|
||||
for_each = local.all_job_ids
|
||||
|
||||
name = "${each.key}-secrets-mysql-psk"
|
||||
description = "Give access to MySQL PSK secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql/allowed_psks/backups" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = each.key
|
||||
group = "backup"
|
||||
task = "stunnel"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "secrets_postgres" {
|
||||
for_each = local.all_job_ids
|
||||
|
||||
name = "${each.key}-secrets-postgres"
|
||||
description = "Give access to Postgres secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/postgres" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = each.key
|
||||
}
|
||||
}
|
||||
|
||||
resource "random_password" "postgres_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "postgres_psk" {
|
||||
path = "secrets/postgres/allowed_psks/backups"
|
||||
items = {
|
||||
psk = "backups:${resource.random_password.postgres_psk.result}"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "postgres_psk" {
|
||||
for_each = local.all_job_ids
|
||||
|
||||
name = "${each.key}-secrets-postgres-psk"
|
||||
description = "Give access to Postgres PSK secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/postgres/allowed_psks/backups" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = each.key
|
||||
group = "backup"
|
||||
task = "stunnel"
|
||||
}
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
job "authelia" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/authelia"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Create local authelia dir" {
|
||||
pre_script {
|
||||
on_backup = "mkdir -p /local/authelia"
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup database" {
|
||||
mysql "Backup database" {
|
||||
hostname = env("MYSQL_HOST")
|
||||
port = env("MYSQL_PORT")
|
||||
database = "authelia"
|
||||
username = env("MYSQL_USER")
|
||||
password = env("MYSQL_PASSWORD")
|
||||
no_tablespaces = true
|
||||
dump_to = "/local/authelia/dump.sql"
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = ["/local/authelia"]
|
||||
|
||||
backup_opts {
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 2
|
||||
KeepHourly = 24
|
||||
KeepDaily = 30
|
||||
KeepWeekly = 8
|
||||
KeepMonthly = 6
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
@ -1,57 +0,0 @@
|
||||
job "git" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/gitea"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Create local gitea dir" {
|
||||
pre_script {
|
||||
on_backup = "mkdir -p /local/gitea"
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup database" {
|
||||
mysql "Backup database" {
|
||||
hostname = env("MYSQL_HOST")
|
||||
port = env("MYSQL_PORT")
|
||||
database = "gitea"
|
||||
username = env("MYSQL_USER")
|
||||
password = env("MYSQL_PASSWORD")
|
||||
no_tablespaces = true
|
||||
dump_to = "/local/gitea/dump.sql"
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = [
|
||||
"/local/gitea",
|
||||
"/data/nas-container/gitea",
|
||||
]
|
||||
|
||||
backup_opts {
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 2
|
||||
KeepHourly = 24
|
||||
KeepDaily = 30
|
||||
KeepWeekly = 8
|
||||
KeepMonthly = 6
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
@ -1,64 +0,0 @@
|
||||
job "lidarr" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/lidarr"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup main database" {
|
||||
postgres "Backup database" {
|
||||
hostname = env("POSTGRES_HOST")
|
||||
port = env("POSTGRES_PORT")
|
||||
username = env("POSTGRES_USER")
|
||||
password = env("POSTGRES_PASSWORD")
|
||||
database = "lidarr"
|
||||
no_tablespaces = true
|
||||
dump_to = "/data/nas-container/lidarr/Backups/dump-lidarr.sql"
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup logs database" {
|
||||
postgres "Backup database" {
|
||||
hostname = env("POSTGRES_HOST")
|
||||
port = env("POSTGRES_PORT")
|
||||
username = env("POSTGRES_USER")
|
||||
password = env("POSTGRES_PASSWORD")
|
||||
database = "lidarr-logs"
|
||||
no_tablespaces = true
|
||||
dump_to = "/data/nas-container/lidarr/Backups/dump-lidarr-logs.sql"
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = ["/data/nas-container/lidarr"]
|
||||
|
||||
backup_opts {
|
||||
Exclude = [
|
||||
"lidarr_backup_*.zip",
|
||||
"/data/nas-container/lidarr/MediaCover",
|
||||
"/data/nas-container/lidarr/logs",
|
||||
]
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 2
|
||||
KeepDaily = 30
|
||||
KeepWeekly = 8
|
||||
KeepMonthly = 6
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
@ -1,64 +0,0 @@
|
||||
job "radarr" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/radarr"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup main database" {
|
||||
postgres "Backup database" {
|
||||
hostname = env("POSTGRES_HOST")
|
||||
port = env("POSTGRES_PORT")
|
||||
username = env("POSTGRES_USER")
|
||||
password = env("POSTGRES_PASSWORD")
|
||||
database = "radarr"
|
||||
no_tablespaces = true
|
||||
dump_to = "/data/nas-container/radarr/Backups/dump-radarr.sql"
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup logs database" {
|
||||
postgres "Backup database" {
|
||||
hostname = env("POSTGRES_HOST")
|
||||
port = env("POSTGRES_PORT")
|
||||
username = env("POSTGRES_USER")
|
||||
password = env("POSTGRES_PASSWORD")
|
||||
database = "radarr-logs"
|
||||
no_tablespaces = true
|
||||
dump_to = "/data/nas-container/radarr/Backups/dump-radarr-logs.sql"
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = ["/data/nas-container/radarr"]
|
||||
|
||||
backup_opts {
|
||||
Exclude = [
|
||||
"radarr_backup_*.zip",
|
||||
"/data/nas-container/radarr/MediaCover",
|
||||
"/data/nas-container/radarr/logs",
|
||||
]
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 2
|
||||
KeepDaily = 30
|
||||
KeepWeekly = 8
|
||||
KeepMonthly = 6
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
@ -1,67 +0,0 @@
|
||||
job "sonarr" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/sonarr"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup main database" {
|
||||
postgres "Backup database" {
|
||||
hostname = env("POSTGRES_HOST")
|
||||
port = env("POSTGRES_PORT")
|
||||
username = env("POSTGRES_USER")
|
||||
password = env("POSTGRES_PASSWORD")
|
||||
database = "sonarr"
|
||||
no_tablespaces = true
|
||||
dump_to = "/data/nas-container/sonarr/Backups/dump-sonarr.sql"
|
||||
}
|
||||
}
|
||||
|
||||
task "Backup logs database" {
|
||||
postgres "Backup database" {
|
||||
hostname = env("POSTGRES_HOST")
|
||||
port = env("POSTGRES_PORT")
|
||||
username = env("POSTGRES_USER")
|
||||
password = env("POSTGRES_PASSWORD")
|
||||
database = "sonarr-logs"
|
||||
no_tablespaces = true
|
||||
dump_to = "/data/nas-container/sonarr/Backups/dump-sonarr-logs.sql"
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = ["/data/nas-container/sonarr"]
|
||||
|
||||
backup_opts {
|
||||
Exclude = [
|
||||
"sonarr_backup_*.zip",
|
||||
"/data/nas-container/sonarr/MediaCover",
|
||||
"/data/nas-container/sonarr/logs",
|
||||
"*.db",
|
||||
"*.db-shm",
|
||||
"*.db-wal",
|
||||
]
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 2
|
||||
KeepDaily = 30
|
||||
KeepWeekly = 8
|
||||
KeepMonthly = 6
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
@ -1,5 +0,0 @@
variable "use_wesher" {
  type        = bool
  description = "Indicates whether or not services should expose themselves on the wesher network"
  default     = true
}
12 core.tf Normal file
@ -0,0 +1,12 @@
module "databases" {
  source = "./databases"
}

module "core" {
  source = "./core"

  base_hostname = var.base_hostname

  # Metrics and Blocky depend on databases
  depends_on = [module.databases]
}
45 core/.terraform.lock.hcl generated
@ -2,39 +2,20 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.1.1"
|
||||
version = "1.4.20"
|
||||
hashes = [
|
||||
"h1:liQBgBXfQEYmwpoGZUfSsu0U0t/nhvuRZbMhaMz7wcQ=",
|
||||
"zh:28bc6922e8a21334568410760150d9d413d7b190d60b5f0b4aab2f4ef52efeeb",
|
||||
"zh:2d4283740e92ce1403875486cd5ff2c8acf9df28c190873ab4d769ce37db10c1",
|
||||
"zh:457e16d70075eae714a7df249d3ba42c2176f19b6750650152c56f33959028d9",
|
||||
"zh:49ee88371e355c00971eefee6b5001392431b47b3e760a5c649dda76f59fb8fa",
|
||||
"zh:614ad3bf07155ed8a5ced41dafb09042afbd1868490a379654b3e970def8e33d",
|
||||
"zh:75be7199d76987e7549e1f27439922973d1bf27353b44a593bfbbc2e3b9f698f",
|
||||
"h1:M/QVXHPfeySejJZI3I8mBYrL/J9VsbnyF/dKIMlUhXo=",
|
||||
"zh:02989edcebe724fc0aa873b22176fd20074c4f46295e728010711a8fc5dfa72c",
|
||||
"zh:089ba7d19bcf5c6bab3f8b8c5920eb6d78c52cf79bb0c5dfeb411c600e7efcba",
|
||||
"zh:235865a2182ca372bcbf440201a8b8cc0715ad5dbc4de893d99b6f32b5be53ab",
|
||||
"zh:67ea718764f3f344ecc6e027d20c1327b86353c8064aa90da3ec12cec4a88954",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:888e14a24410d56b37212fbea373a3e0401d0ff8f8e4f4dd00ba8b29de9fed39",
|
||||
"zh:aa261925e8b152636a0886b3a2149864707632d836e98a88dacea6cfb6302082",
|
||||
"zh:ac10cefb4064b3bb63d4b0379624a416a45acf778eac0004466f726ead686196",
|
||||
"zh:b1a3c8b4d5b2dc9b510eac5e9e02665582862c24eb819ab74f44d3d880246d4f",
|
||||
"zh:c552e2fe5670b6d3ad9a5faf78e3a27197eeedbe2b13928d2c491fa509bc47c7",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.6.0"
|
||||
hashes = [
|
||||
"h1:R5Ucn26riKIEijcsiOMBR3uOAjuOMfI1x7XvH4P6B1w=",
|
||||
"zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d",
|
||||
"zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211",
|
||||
"zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829",
|
||||
"zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d",
|
||||
"zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17",
|
||||
"zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21",
|
||||
"zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839",
|
||||
"zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0",
|
||||
"zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c",
|
||||
"zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e",
|
||||
"zh:8c68c540f0df4980568bdd688c2adec86eda62eb2de154e3db215b16de0a7ae0",
|
||||
"zh:911969c63a69a733be57b96d54c5966c9424e1abec8d5f20038c8cef3a504c65",
|
||||
"zh:a673c92ddc9d47e8d53dcb9b376f1adcb4543488202fc83a3e7eab8677530684",
|
||||
"zh:a94a73eae89fd8c8ebf872013079be41161d3f293f4026c92d45c4c5667dd613",
|
||||
"zh:db6b89f8b696040c0344f00928e4cf6e0a75034421ba14cdcd8a4d23bc865dce",
|
||||
"zh:e512c0b1239e3d66b60d22c2b4de19fea288e492cde90dff9277cc475fd9dbbf",
|
||||
"zh:ef6eccecbdef3bb8ce629cabfb5550c1db5c3e952943dda1786ef6cb470a8c23",
|
||||
]
|
||||
}
|
||||
|
204 core/authelia.tf
@ -1,204 +0,0 @@
|
||||
module "authelia" {
|
||||
source = "../services/service"
|
||||
|
||||
name = "authelia"
|
||||
instance_count = 2
|
||||
priority = 70
|
||||
image = "authelia/authelia:4.38"
|
||||
args = ["--config", "$${NOMAD_TASK_DIR}/authelia.yml"]
|
||||
ingress = true
|
||||
service_port = 9999
|
||||
service_port_static = true
|
||||
use_wesher = var.use_wesher
|
||||
# metrics_port = 9959
|
||||
|
||||
env = {
|
||||
AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE = "$${NOMAD_SECRETS_DIR}/ldap_password.txt"
|
||||
AUTHELIA_JWT_SECRET_FILE = "$${NOMAD_SECRETS_DIR}/jwt_secret.txt"
|
||||
AUTHELIA_SESSION_SECRET_FILE = "$${NOMAD_SECRETS_DIR}/session_secret.txt"
|
||||
AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE = "$${NOMAD_SECRETS_DIR}/storage_encryption_key.txt"
|
||||
AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE = "$${NOMAD_SECRETS_DIR}/mysql_password.txt"
|
||||
AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE = "$${NOMAD_SECRETS_DIR}/smtp_password.txt"
|
||||
AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE = "$${NOMAD_SECRETS_DIR}/oidc_hmac_secret.txt"
|
||||
AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE = "$${NOMAD_SECRETS_DIR}/oidc_issuer_private_key.txt"
|
||||
# AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_CERTIFICATE_CHAIN_FILE = "$${NOMAD_SECRETS_DIR}/oidc_issuer_certificate_chain.txt"
|
||||
}
|
||||
|
||||
use_mysql = true
|
||||
use_ldap = true
|
||||
use_redis = true
|
||||
use_smtp = true
|
||||
mysql_bootstrap = {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
service_tags = [
|
||||
# Configure traefik to add this middleware
|
||||
"traefik.http.middlewares.authelia.forwardAuth.address=http://authelia.nomad:$${NOMAD_PORT_main}/api/verify?rd=https%3A%2F%2Fauthelia.${var.base_hostname}%2F",
|
||||
"traefik.http.middlewares.authelia.forwardAuth.trustForwardHeader=true",
|
||||
"traefik.http.middlewares.authelia.forwardAuth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email",
|
||||
"traefik.http.middlewares.authelia-basic.forwardAuth.address=http://authelia.nomad:$${NOMAD_PORT_main}/api/verify?auth=basic",
|
||||
"traefik.http.middlewares.authelia-basic.forwardAuth.trustForwardHeader=true",
|
||||
"traefik.http.middlewares.authelia-basic.forwardAuth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email",
|
||||
]
|
||||
|
||||
templates = [
|
||||
{
|
||||
data = file("${path.module}/authelia.yml")
|
||||
dest = "authelia.yml"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"secrets/ldap\" }}{{ .admin_password }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "ldap_password.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .jwt_secret }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "jwt_secret.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .session_secret }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "session_secret.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .storage_encryption_key }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "storage_encryption_key.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .db_pass }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "mysql_password.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .oidc_hmac_secret }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "oidc_hmac_secret.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .oidc_issuer_private_key }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "oidc_issuer_private_key.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .oidc_issuer_certificate_chain }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "oidc_issuer_certificate_chain.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"secrets/smtp\" }}{{ .password }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "smtp_password.txt"
|
||||
mount = false
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "authelia" {
|
||||
name = "authelia"
|
||||
description = "Give access to shared authelia variables"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "authelia/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
path "secrets/authelia/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = module.authelia.job_id
|
||||
}
|
||||
}
|
||||
|
||||
# Give access to ldap secrets
|
||||
resource "nomad_acl_policy" "authelia_ldap_secrets" {
|
||||
name = "authelia-secrets-ldap"
|
||||
description = "Give access to LDAP secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/ldap" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = module.authelia.job_id
|
||||
}
|
||||
}
|
||||
|
||||
# Enable oidc for nomad clients
|
||||
module "nomad_oidc_client" {
|
||||
source = "./oidc_client"
|
||||
|
||||
name = "nomad"
|
||||
oidc_client_config = {
|
||||
description = "Nomad"
|
||||
authorization_policy = "two_factor"
|
||||
redirect_uris = [
|
||||
"https://nomad.${var.base_hostname}/oidc/callback",
|
||||
"https://nomad.${var.base_hostname}/ui/settings/tokens",
|
||||
]
|
||||
scopes = ["openid", "groups"]
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_auth_method" "nomad_authelia" {
|
||||
name = "authelia"
|
||||
type = "OIDC"
|
||||
token_locality = "global"
|
||||
max_token_ttl = "1h0m0s"
|
||||
default = true
|
||||
|
||||
config {
|
||||
oidc_discovery_url = "https://authelia.${var.base_hostname}"
|
||||
oidc_client_id = module.nomad_oidc_client.client_id
|
||||
oidc_client_secret = module.nomad_oidc_client.secret
|
||||
bound_audiences = [module.nomad_oidc_client.client_id]
|
||||
oidc_scopes = [
|
||||
"groups",
|
||||
"openid",
|
||||
]
|
||||
allowed_redirect_uris = [
|
||||
"https://nomad.${var.base_hostname}/oidc/callback",
|
||||
"https://nomad.${var.base_hostname}/ui/settings/tokens",
|
||||
]
|
||||
list_claim_mappings = {
|
||||
"groups" : "roles"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_binding_rule" "nomad_authelia_admin" {
|
||||
description = "engineering rule"
|
||||
auth_method = nomad_acl_auth_method.nomad_authelia.name
|
||||
selector = "\"nomad-admin\" in list.roles"
|
||||
bind_type = "role"
|
||||
bind_name = "admin" # acls.nomad_acl_role.admin.name
|
||||
}
|
||||
|
||||
resource "nomad_acl_binding_rule" "nomad_authelia_deploy" {
|
||||
description = "engineering rule"
|
||||
auth_method = nomad_acl_auth_method.nomad_authelia.name
|
||||
selector = "\"nomad-deploy\" in list.roles"
|
||||
bind_type = "role"
|
||||
bind_name = "deploy" # acls.nomad_acl_role.deploy.name
|
||||
}
|
@ -13,7 +13,7 @@ default_2fa_method: ""

server:
host: 0.0.0.0
port: {{ env "NOMAD_PORT_main" }}
port: 9091
disable_healthcheck: false

log:
@ -89,8 +89,8 @@ authentication_backend:
groups_filter: (member={dn})

## The username and password of the admin user.
{{ with nomadVar "secrets/ldap" }}
user: uid={{ .admin_user }},ou=people,{{ with nomadVar "nomad/jobs" }}{{ .ldap_base_dn }}{{ end }}
{{ with nomadVar "nomad/jobs/authelia" }}
user: uid={{ .lldap_admin_user }},ou=people,{{ with nomadVar "nomad/jobs" }}{{ .ldap_base_dn }}{{ end }}
{{ end }}
# password set using secrets file
# password: <secret>
@ -151,22 +151,6 @@ access_control:
networks: 192.168.5.0/24

rules:
## Allow favicons on internal network
- domain: '*.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}'
resources:
- '^/apple-touch-icon-precomposed\.png$'
- '^/assets/safari-pinned-tab\.svg$'
- '^/apple-touch-icon-180x180\.png$'
- '^/apple-touch-icon\.png$'
- '^/favicon\.ico$'
networks:
- internal
policy: bypass

{{ range nomadVarList "authelia/access_control/service_rules" }}{{ with nomadVar .Path }}
- domain: '{{ .name }}.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}'
{{ .rule.Value | indent 6 }}
{{ end }}{{ end }}
## Rules applied to everyone
- domain: '*.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}'
networks:
@ -235,13 +219,13 @@ storage:
## The available providers are: filesystem, smtp. You must use only one of these providers.
notifier:
## You can disable the notifier startup check by setting this to true.
disable_startup_check: true
disable_startup_check: false

{{ with nomadVar "secrets/smtp" }}
{{ with nomadVar "nomad/jobs" }}
smtp:
host: {{ .server }}
port: {{ .port }}
username: {{ .user }}
host: {{ .smtp_server }}
port: {{ .smtp_port }}
username: {{ .smtp_user }}
# password: <in file>

{{- end }}
@ -261,18 +245,4 @@ identity_providers:
# hmac_secret: <file>
# issuer_private_key: <file>

clients:
{{ range nomadVarList "authelia/access_control/oidc_clients" -}}
{{- $name := (sprig_last (sprig_splitList "/" .Path)) -}}
{{ "-" | indent 6 }}
{{ with nomadVar .Path }}

{{- $im := .ItemsMap -}}
{{- $im = sprig_set $im "redirect_uris" (.redirect_uris.Value | parseYAML) -}}
{{- $im = sprig_set $im "scopes" (.scopes.Value | parseYAML) -}}
{{- with nomadVar (printf "secrets/authelia/%s" $name) -}}
{{- $im = sprig_set $im "secret" .secret_hash.Value -}}
{{- end -}}
{{ $im | toYAML | indent 8 }}
{{ end }}
{{ end }}
clients: {{ with nomadVar "nomad/jobs/authelia" }}{{ .oidc_clients.Value }}{{ end }}
core/blocky/.terraform.lock.hcl (generated)
@ -2,39 +2,20 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.0.0"
|
||||
version = "1.4.16"
|
||||
hashes = [
|
||||
"h1:lIHIxA6ZmfyTGL3J9YIddhxlfit4ipSS09BLxkwo6L0=",
|
||||
"zh:09b897d64db293f9a904a4a0849b11ec1e3fff5c638f734d82ae36d8dc044b72",
|
||||
"zh:435cc106799290f64078ec24b6c59cb32b33784d609088638ed32c6d12121199",
|
||||
"zh:7073444bd064e8c4ec115ca7d9d7f030cc56795c0a83c27f6668bba519e6849a",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:79d238c35d650d2d83a439716182da63f3b2767e72e4cbd0b69cb13d9b1aebfc",
|
||||
"zh:7ef5f49344278fe0bbc5447424e6aa5425ff1821d010d944a444d7fa2c751acf",
|
||||
"zh:92179091638c8ba03feef371c4361a790190f9955caea1fa59de2055c701a251",
|
||||
"zh:a8a34398851761368eb8e7c171f24e55efa6e9fdbb5c455f6dec34dc17f631bc",
|
||||
"zh:b38fd5338625ebace5a4a94cea1a28b11bd91995d834e318f47587cfaf6ec599",
|
||||
"zh:b71b273a2aca7ad5f1e07c767b25b5a888881ba9ca93b30044ccc39c2937f03c",
|
||||
"zh:cd14357e520e0f09fb25badfb4f2ee37d7741afdc3ed47c7bcf54c1683772543",
|
||||
"zh:e05e025f4bb95138c3c8a75c636e97cd7cfd2fc1525b0c8bd097db8c5f02df6e",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.5.1"
|
||||
hashes = [
|
||||
"h1:VSnd9ZIPyfKHOObuQCaKfnjIHRtR7qTw19Rz8tJxm+k=",
|
||||
"zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64",
|
||||
"zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d",
|
||||
"zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831",
|
||||
"zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3",
|
||||
"zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b",
|
||||
"zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2",
|
||||
"zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865",
|
||||
"zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03",
|
||||
"zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602",
|
||||
"zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014",
|
||||
"h1:PQxNPNmMVOErxryTWIJwr22k95DTSODmgRylqjc2TjI=",
|
||||
"h1:tyfjD/maKzb0RxxD9KWgLnkJu9lnYziYsQgGw85Giz8=",
|
||||
"zh:0d4fbb7030d9caac3b123e60afa44f50c83cc2a983e1866aec7f30414abe7b0e",
|
||||
"zh:0db080228e07c72d6d8ca8c45249d6f97cd0189fce82a77abbdcd49a52e57572",
|
||||
"zh:0df88393271078533a217654b96f0672c60eb59570d72e6aefcb839eea87a7a0",
|
||||
"zh:2883b335bb6044b0db6a00e602d6926c047c7f330294a73a90d089f98b24d084",
|
||||
"zh:390158d928009a041b3a182bdd82376b50530805ae92be2b84ed7c3b0fa902a0",
|
||||
"zh:7169b8f8df4b8e9659c49043848fd5f7f8473d0471f67815e8b04980f827f5ef",
|
||||
"zh:9417ee1383b1edd137024882d7035be4dca51fb4f725ca00ed87729086ec1755",
|
||||
"zh:a22910b5a29eeab5610350700b4899267c1b09b66cf21f7e4d06afc61d425800",
|
||||
"zh:a6185c9cd7aa458cd81861058ba568b6411fbac344373a20155e20256f4a7557",
|
||||
"zh:b6260ca9f034df1b47905b4e2a9c33b67dbf77224a694d5b10fb09ae92ffad4c",
|
||||
"zh:d87c12a6a7768f2b6c2a59495c7dc00f9ecc52b1b868331d4c284f791e278a1e",
|
||||
]
|
||||
}
|
||||
|
@ -1,23 +1,20 @@
variable "config_data" {
type = string
description = "Plain text config file for blocky"
}

job "blocky" {
datacenters = ["dc1"]
type = "service"
type = "system"
priority = 100

constraint {
distinct_hosts = true
}

update {
max_parallel = 1
auto_revert = true
min_healthy_time = "60s"
healthy_deadline = "5m"
# TODO: maybe switch to service job from system so we can use canary and autorollback
# auto_revert = true
}

group "blocky" {
# TODO: This must be updated to match the number of servers (possibly grabbed from TF)
# I am moving away from `system` jobs because of https://github.com/hashicorp/nomad/issues/12023
count = 2

network {
mode = "bridge"
@ -27,17 +24,13 @@ job "blocky" {
}

port "api" {
%{~ if use_wesher ~}
host_network = "wesher"
%{~ endif ~}
to = "4000"
}

dns {
# Set explicit DNS servers because tasks, by default, use this task
servers = [
"192.168.2.1",
]
servers = ["1.1.1.1", "1.0.0.1"]
}
}
@ -65,11 +58,6 @@ job "blocky" {
|
||||
path = "/"
|
||||
interval = "10s"
|
||||
timeout = "3s"
|
||||
|
||||
check_restart {
|
||||
limit = 3
|
||||
grace = "5m"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -77,32 +65,20 @@ job "blocky" {
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "ghcr.io/0xerr0r/blocky:v0.24"
|
||||
args = ["-c", "$${NOMAD_TASK_DIR}/config.yml"]
|
||||
image = "ghcr.io/0xerr0r/blocky"
|
||||
args = ["-c", "${NOMAD_TASK_DIR}/config.yml"]
|
||||
ports = ["dns", "api"]
|
||||
}
|
||||
|
||||
action "refresh-lists" {
|
||||
command = "/app/blocky"
|
||||
args = ["lists", "refresh"]
|
||||
}
|
||||
|
||||
action "healthcheck" {
|
||||
command = "/app/blocky"
|
||||
args = ["healthcheck"]
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 50
|
||||
memory = 75
|
||||
memory_max = 150
|
||||
memory = 50
|
||||
memory_max = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
${file("${module_path}/config.yml")}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/config.yml"
|
||||
data = var.config_data
|
||||
destination = "${NOMAD_TASK_DIR}/config.yml"
|
||||
splay = "1m"
|
||||
|
||||
wait {
|
||||
@ -119,7 +95,7 @@ EOF
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/nomad.hosts"
|
||||
destination = "${NOMAD_TASK_DIR}/nomad.hosts"
|
||||
change_mode = "noop"
|
||||
|
||||
wait {
|
||||
@ -127,121 +103,6 @@ EOF
|
||||
max = "20s"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ if nomadVarExists "blocky_lists/user" }}
|
||||
{{ with nomadVar "blocky_lists/user" -}}
|
||||
{{ .block_list.Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/block"
|
||||
change_mode = "script"
|
||||
|
||||
change_script {
|
||||
command = "/app/blocky"
|
||||
args = ["lists", "refresh"]
|
||||
timeout = "20s"
|
||||
}
|
||||
|
||||
wait {
|
||||
min = "30s"
|
||||
max = "1m"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ if nomadVarExists "blocky_lists/user" }}
|
||||
{{ with nomadVar "blocky_lists/user" -}}
|
||||
{{ .allow_list.Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/allow"
|
||||
change_mode = "script"
|
||||
|
||||
change_script {
|
||||
command = "/app/blocky"
|
||||
args = ["lists", "refresh"]
|
||||
timeout = "20s"
|
||||
}
|
||||
|
||||
wait {
|
||||
min = "30s"
|
||||
max = "1m"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ if nomadVarExists "blocky_lists/terraform" }}
|
||||
{{ with nomadVar "blocky_lists/terraform" -}}
|
||||
{{ .smarttv_regex.Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/smarttv-regex.txt"
|
||||
change_mode = "script"
|
||||
|
||||
change_script {
|
||||
command = "/app/blocky"
|
||||
args = ["lists", "refresh"]
|
||||
timeout = "20s"
|
||||
}
|
||||
|
||||
wait {
|
||||
min = "10s"
|
||||
max = "20s"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ if nomadVarExists "blocky_lists/terraform" }}
|
||||
{{ with nomadVar "blocky_lists/terraform" -}}
|
||||
{{ .wemo.Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/wemo.txt"
|
||||
change_mode = "script"
|
||||
|
||||
change_script {
|
||||
command = "/app/blocky"
|
||||
args = ["lists", "refresh"]
|
||||
timeout = "20s"
|
||||
}
|
||||
|
||||
wait {
|
||||
min = "10s"
|
||||
max = "20s"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ if nomadVarExists "blocky_lists/terraform" }}
|
||||
{{ with nomadVar "blocky_lists/terraform" -}}
|
||||
{{ .sonos.Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/sonos.txt"
|
||||
change_mode = "script"
|
||||
|
||||
change_script {
|
||||
command = "/app/blocky"
|
||||
args = ["lists", "refresh"]
|
||||
timeout = "20s"
|
||||
}
|
||||
|
||||
wait {
|
||||
min = "10s"
|
||||
max = "20s"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task "stunnel" {
|
||||
@ -253,9 +114,9 @@ EOF
|
||||
}
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
image = "alpine:3.17"
|
||||
ports = ["tls"]
|
||||
args = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
@ -265,39 +126,41 @@ EOF
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
foreground = yes
|
||||
delay = yes
|
||||
|
||||
[dns_server]
|
||||
# Dummy server to keep stunnel running if no mysql is present
|
||||
accept = 8053
|
||||
connect = 127.0.0.1:53
|
||||
ciphers = PSK
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
|
||||
[mysql_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:3306
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||
{{- end }}
|
||||
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-blocky" -}}
|
||||
[redis_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:6379
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||
{{- end }}
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
destination = "${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/mysql/allowed_psks/blocky" }}{{ .psk }}{{ end -}}
|
||||
syslog = no
|
||||
foreground = yes
|
||||
delay = yes
|
||||
|
||||
[mysql_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:3306
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
{{- end }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||
|
||||
[redis_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:6379
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-blocky" -}}
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
{{- end }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||
EOF
|
||||
destination = "${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "nomad/jobs/blocky/blocky/stunnel" }}{{ .mysql_stunnel_psk }}{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
|
||||
}
|
||||
@ -306,11 +169,11 @@ EOF
|
||||
data = <<EOF
|
||||
{{- with nomadVar "nomad/jobs/blocky/blocky/stunnel" -}}{{ .redis_stunnel_psk }}{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
|
||||
destination = "${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
|
||||
}
|
||||
}
|
||||
|
||||
task "mysql-bootstrap" {
|
||||
task "blocky-bootstrap" {
|
||||
driver = "docker"
|
||||
|
||||
lifecycle {
|
||||
@ -321,19 +184,23 @@ EOF
|
||||
config {
|
||||
image = "mariadb:10"
|
||||
args = [
|
||||
"/usr/bin/timeout",
|
||||
"2m",
|
||||
"/bin/bash",
|
||||
"-c",
|
||||
"/usr/bin/timeout 2m /bin/bash -c \"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do echo 'Retry in 10s'; sleep 10; done\" || true",
|
||||
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
|
||||
]
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
[client]
|
||||
host=127.0.0.1
|
||||
port=3306
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
|
||||
host={{ .Address }}
|
||||
port={{ .Port }}
|
||||
{{ end -}}
|
||||
user=root
|
||||
{{ with nomadVar "secrets/mysql" }}
|
||||
{{ with nomadVar "nomad/jobs" }}
|
||||
password={{ .mysql_root_password }}
|
||||
{{ end }}
|
||||
EOF
|
||||
|
@ -1,88 +1,25 @@
variable "base_hostname" {
type = string
description = "Base hostname to serve content from"
default = "dev.homelab"
}

locals {
config_data = templatefile(
"${path.module}/config.yml",
{
"base_hostname" = var.base_hostname,
}
)
}

resource "nomad_job" "blocky" {
jobspec = templatefile("${path.module}/blocky.nomad", {
use_wesher = var.use_wesher,
module_path = path.module,
})
}

# Generate secrets and policies for access to MySQL
resource "nomad_acl_policy" "blocky_mysql_bootstrap_secrets" {
name = "blocky-secrets-mysql"
description = "Give access to MySQL secrets"
rules_hcl = <<EOH
namespace "default" {
variables {
path "secrets/mysql" {
capabilities = ["read"]
hcl2 {
enabled = true
vars = {
"config_data" = local.config_data,
}
}
}
}
EOH

job_acl {
job_id = "blocky"
group = "blocky"
task = "mysql-bootstrap"
}
}

resource "random_password" "blocky_mysql_psk" {
length = 32
override_special = "!@#%&*-_="
}

resource "nomad_variable" "blocky_mysql_psk" {
path = "secrets/mysql/allowed_psks/blocky"
items = {
psk = "blocky:${resource.random_password.blocky_mysql_psk.result}"
}
}

resource "nomad_acl_policy" "blocky_mysql_psk" {
name = "blocky-secrets-mysql-psk"
description = "Give access to MySQL PSK secrets"
rules_hcl = <<EOH
namespace "default" {
variables {
path "secrets/mysql/allowed_psks/blocky" {
capabilities = ["read"]
}
}
}
EOH

job_acl {
job_id = "blocky"
group = "blocky"
task = "stunnel"
}
}

resource "nomad_variable" "blocky_lists_terraform" {
path = "blocky_lists/terraform"
items = {
smarttv_regex = file("${path.module}/list-smarttv-regex.txt")
wemo = file("${path.module}/list-wemo.txt")
sonos = file("${path.module}/list-sonos.txt")
}
}

resource "nomad_acl_policy" "blocky_lists" {
name = "blocky-lists"
description = "Give access Blocky lists"
rules_hcl = <<EOH
namespace "default" {
variables {
path "blocky_lists/*" {
capabilities = ["read"]
}
}
}
EOH

job_acl {
job_id = "blocky"
group = "blocky"
task = "blocky"
}
jobspec = file("${path.module}/blocky.nomad")
}
@ -2,53 +2,28 @@ ports:
|
||||
dns: 53
|
||||
http: 4000
|
||||
|
||||
# I must have ip v6 blocked or something
|
||||
connectIPVersion: v4
|
||||
|
||||
bootstrapDns:
|
||||
- upstream: 1.1.1.1
|
||||
- upstream: 1.0.0.1
|
||||
- upstream: 9.9.9.9
|
||||
- upstream: 149.112.112.112
|
||||
|
||||
|
||||
upstreams:
|
||||
init:
|
||||
strategy: fast
|
||||
groups:
|
||||
default:
|
||||
- https://dns.quad9.net/dns-query
|
||||
- tcp-tls:dns.quad9.net
|
||||
- https://one.one.one.one/dns-query
|
||||
- tcp-tls:one.one.one.one
|
||||
# cloudflare:
|
||||
# - 1.1.1.1
|
||||
# - 1.0.0.1
|
||||
# - 2606:4700:4700::1111
|
||||
# - 2606:4700:4700::1001
|
||||
# - https://one.one.one.one/dns-query
|
||||
# - tcp-tls:one.one.one.one
|
||||
# quad9:
|
||||
# - 9.9.9.9
|
||||
# - 149.112.112.112
|
||||
# - 2620:fe::fe
|
||||
# - 2620:fe::9
|
||||
# - https://dns.quad9.net/dns-query
|
||||
# - tcp-tls:dns.quad9.net
|
||||
# quad9-secured:
|
||||
# - 9.9.9.11
|
||||
# - 149.112.112.11
|
||||
# - 2620:fe::11
|
||||
# - 2620:fe::fe:11
|
||||
# - https://dns11.quad9.net/dns-query
|
||||
# - tcp-tls:dns11.quad9.net
|
||||
# quad9-unsecured:
|
||||
# - 9.9.9.10
|
||||
# - 149.112.112.10
|
||||
# - 2620:fe::10
|
||||
# - 2620:fe::fe:10
|
||||
# - https://dns10.quad9.net/dns-query
|
||||
# - tcp-tls:dns10.quad9.net
|
||||
upstream:
|
||||
default:
|
||||
- 1.1.1.1
|
||||
- 1.0.0.1
|
||||
quad9:
|
||||
- 9.9.9.9
|
||||
- 149.112.112.112
|
||||
- 2620:fe::fe
|
||||
- 2620:fe::9
|
||||
- https://dns.quad9.net/dns-query
|
||||
- tcp-tls:dns.quad9.net
|
||||
quad9-unsecured:
|
||||
- 9.9.9.10
|
||||
- 149.112.112.10
|
||||
- 2620:fe::10
|
||||
- 2620:fe::fe:10
|
||||
- https://dns10.quad9.net/dns-query
|
||||
- tcp-tls:dns10.quad9.net
|
||||
|
||||
conditional:
|
||||
fallbackUpstream: false
|
||||
@ -61,11 +36,9 @@ conditional:
|
||||
.: 192.168.2.1
|
||||
|
||||
hostsFile:
|
||||
sources:
|
||||
- {{ env "NOMAD_TASK_DIR" }}/nomad.hosts
|
||||
filePath: {{ env "NOMAD_TASK_DIR" }}/nomad.hosts
|
||||
hostsTTL: 30s
|
||||
loading:
|
||||
refreshPeriod: 30s
|
||||
refreshPeriod: 30s
|
||||
|
||||
clientLookup:
|
||||
upstream: 192.168.2.1
|
||||
@ -77,12 +50,22 @@ blocking:
|
||||
- http://sysctl.org/cameleon/hosts
|
||||
- https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt
|
||||
- https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt
|
||||
# - https://hosts-file.net/ad_servers.txt
|
||||
iot:
|
||||
- https://raw.githubusercontent.com/Perflyst/PiHoleBlocklist/master/SmartTV.txt
|
||||
- {{ env "NOMAD_TASK_DIR" }}/smarttv-regex.txt
|
||||
- {{ env "NOMAD_TASK_DIR" }}/wemo.txt
|
||||
- {{ env "NOMAD_TASK_DIR" }}/sonos.txt
|
||||
- https://hosts-file.net/ad_servers.txt
|
||||
smarttv:
|
||||
- https://perflyst.github.io/PiHoleBlocklist/SmartTV.txt
|
||||
- https://perflyst.github.io/PiHoleBlocklist/regex.list
|
||||
wemo:
|
||||
- |
|
||||
# Remote commands
|
||||
api.xbcs.net
|
||||
# Firmware updates
|
||||
fw.xbcs.net
|
||||
# TURN service
|
||||
nat.wemo2.com
|
||||
# Connectivity checks
|
||||
heartbeat.xwemo.com
|
||||
malware:
|
||||
- https://mirror1.malwaredomains.com/files/justdomains
|
||||
antisocial:
|
||||
- |
|
||||
facebook.com
|
||||
@ -90,21 +73,20 @@ blocking:
|
||||
reddit.com
|
||||
twitter.com
|
||||
youtube.com
|
||||
custom:
|
||||
- {{ env "NOMAD_TASK_DIR" }}/block
|
||||
|
||||
whiteLists:
|
||||
custom:
|
||||
- {{ env "NOMAD_TASK_DIR" }}/allow
|
||||
# Move to Gitea when deployed internally
|
||||
ads:
|
||||
{{ with nomadVar "nomad/jobs/blocky" -}}
|
||||
{{ .whitelists_ads.Value | indent 6 }}
|
||||
{{- end }}
|
||||
|
||||
clientGroupsBlock:
|
||||
default:
|
||||
- ads
|
||||
- custom
|
||||
192.168.3.1/24:
|
||||
- ads
|
||||
- iot
|
||||
- custom
|
||||
- malware
|
||||
- smarttv
|
||||
- wemo
|
||||
|
||||
customDNS:
|
||||
customTTL: 1h
|
||||
@ -123,7 +105,7 @@ customDNS:
|
||||
prometheus:
|
||||
enable: true
|
||||
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-blocky" -}}
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-tls" -}}
|
||||
redis:
|
||||
address: 127.0.0.1:6379
|
||||
# password: ""
|
||||
@ -132,6 +114,7 @@ redis:
|
||||
connectionCooldown: 3s
|
||||
{{ end -}}
|
||||
|
||||
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
|
||||
{{ with nomadVar "nomad/jobs/blocky" -}}
|
||||
queryLog:
|
||||
|
@ -1,13 +0,0 @@
|
||||
# From: https://perflyst.github.io/PiHoleBlocklist/regex.list
|
||||
# Title: Perflyst's SmartTV Blocklist for Pi-hole - RegEx extension
|
||||
# Version: 13July2023v1
|
||||
# Samsung
|
||||
/(^|\.)giraffic\.com$/
|
||||
/(^|\.)internetat\.tv$/
|
||||
/(^|\.)pavv\.co\.kr$/
|
||||
/(^|\.)samsungcloudsolution\.net$/
|
||||
/(^|\.)samsungelectronics\.com$/
|
||||
/(^|\.)samsungrm\.net$/
|
||||
# /(^|\.)samsungotn\.net$/ # prevents updates
|
||||
# /(^|\.)samsungcloudcdn\.com$/ # prevents updates
|
||||
# /(^|\.)samsungcloudsolution\.com$/ # prevents internet connection
|
@ -1,2 +0,0 @@
|
||||
# Block Sonos devices from phoning home and allowing remote access
|
||||
/(^|\.)sonos\.com$/
|
@ -1,8 +0,0 @@
|
||||
# Remote commands
|
||||
api.xbcs.net
|
||||
# Firmware updates
|
||||
fw.xbcs.net
|
||||
# TURN service
|
||||
nat.wemo2.com
|
||||
# Connectivity checks
|
||||
heartbeat.xwemo.com
|
@ -1,5 +0,0 @@
|
||||
variable "use_wesher" {
|
||||
type = bool
|
||||
description = "Indicates whether or not services should expose themselves on the wesher network"
|
||||
default = true
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
resource "nomad_job" "exporters" {
|
||||
jobspec = templatefile("${path.module}/exporters.nomad", {
|
||||
use_wesher = var.use_wesher,
|
||||
})
|
||||
}
|
core/grafana.tf
@ -1,117 +0,0 @@
|
||||
resource "nomad_job" "grafana" {
|
||||
jobspec = templatefile("${path.module}/grafana.nomad", {
|
||||
module_path = path.module
|
||||
use_wesher = var.use_wesher
|
||||
})
|
||||
|
||||
depends_on = [nomad_job.prometheus]
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "grafana_smtp_secrets" {
|
||||
name = "grafana-secrets-smtp"
|
||||
description = "Give access to MySQL secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/smtp" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = "grafana"
|
||||
group = "grafana"
|
||||
task = "grafana"
|
||||
}
|
||||
}
|
||||
|
||||
# Generate secrets and policies for access to MySQL
|
||||
resource "nomad_acl_policy" "grafana_mysql_bootstrap_secrets" {
|
||||
name = "grafana-secrets-mysql"
|
||||
description = "Give access to MySQL secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = "grafana"
|
||||
group = "grafana"
|
||||
task = "mysql-bootstrap"
|
||||
}
|
||||
}
|
||||
|
||||
resource "random_password" "grafana_mysql_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "grafana_mysql_psk" {
|
||||
path = "secrets/mysql/allowed_psks/grafana"
|
||||
items = {
|
||||
psk = "grafana:${resource.random_password.grafana_mysql_psk.result}"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "grafana_mysql_psk" {
|
||||
name = "grafana-secrets-mysql-psk"
|
||||
description = "Give access to MySQL PSK secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql/allowed_psks/grafana" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = "grafana"
|
||||
group = "grafana"
|
||||
task = "stunnel"
|
||||
}
|
||||
}
|
||||
|
||||
module "grafana_oidc" {
|
||||
source = "./oidc_client"
|
||||
|
||||
name = "grafana"
|
||||
oidc_client_config = {
|
||||
description = "Grafana"
|
||||
scopes = [
|
||||
"openid",
|
||||
"groups",
|
||||
"email",
|
||||
"profile",
|
||||
]
|
||||
redirect_uris = [
|
||||
"https://grafana.thefij.rocks/login/generic_oauth",
|
||||
]
|
||||
}
|
||||
|
||||
job_acl = {
|
||||
job_id = "grafana"
|
||||
group = "grafana"
|
||||
task = "grafana"
|
||||
}
|
||||
}
|
||||
|
||||
# resource "nomad_variable" "grafana_config" {
|
||||
# for_each = fileset("${path.module}/grafana", "**")
|
||||
#
|
||||
# path = "nomad/jobs/grafana/${replace(each.key, ".", "_")}"
|
||||
# items = {
|
||||
# path = "${each.key}"
|
||||
# value = file("${path.module}/grafana/${each.key}")
|
||||
# left_delimiter = endswith(each.key, ".json") ? "<<<<" : "{{"
|
||||
# right_delimiter = endswith(each.key, ".json") ? ">>>>" : "}}"
|
||||
# }
|
||||
# }
|
@ -1,19 +0,0 @@
---
apiVersion: 1

datasources:
- name: HASS Metrics
url: "http://192.168.2.75:8086"
type: influxdb
access: proxy
database: hass
jsonData:
dbName: hass

- name: Proxmox Metrics
url: "http://192.168.2.75:8086"
type: influxdb
access: proxy
database: proxmox
jsonData:
dbName: proxmox
@ -1,96 +0,0 @@
|
||||
variable "lego_version" {
|
||||
default = "4.14.2"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "nomad_var_dirsync_version" {
|
||||
default = "0.0.2"
|
||||
type = string
|
||||
}
|
||||
|
||||
job "lego" {
|
||||
|
||||
type = "batch"
|
||||
|
||||
periodic {
|
||||
cron = "@weekly"
|
||||
prohibit_overlap = true
|
||||
}
|
||||
|
||||
group "main" {
|
||||
|
||||
network {
|
||||
dns {
|
||||
servers = ["1.1.1.1", "1.0.0.1"]
|
||||
}
|
||||
}
|
||||
|
||||
task "main" {
|
||||
driver = "exec"
|
||||
|
||||
config {
|
||||
command = "/bin/bash"
|
||||
args = ["${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
artifact {
|
||||
source = "https://github.com/go-acme/lego/releases/download/v${var.lego_version}/lego_v${var.lego_version}_linux_${attr.cpu.arch}.tar.gz"
|
||||
}
|
||||
|
||||
artifact {
|
||||
source = "https://git.iamthefij.com/iamthefij/nomad-var-dirsync/releases/download/v${var.nomad_var_dirsync_version}/nomad-var-dirsync-linux-${attr.cpu.arch}.tar.gz"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOH
|
||||
#! /bin/sh
|
||||
set -ex
|
||||
|
||||
cd ${NOMAD_TASK_DIR}
|
||||
|
||||
echo "Read certs from nomad vars"
|
||||
${NOMAD_TASK_DIR}/nomad-var-dirsync-linux-{{ env "attr.cpu.arch" }} -root-var=secrets/certs read .
|
||||
|
||||
action=run
|
||||
if [ -f /.lego/certificates/_.thefij.rocks.crt ]; then
|
||||
action=renew
|
||||
fi
|
||||
|
||||
echo "Attempt to $action certificates"
|
||||
${NOMAD_TASK_DIR}/lego \
|
||||
--accept-tos --pem \
|
||||
--email=iamthefij@gmail.com \
|
||||
--domains="*.thefij.rocks" \
|
||||
--dns="cloudflare" \
|
||||
$action \
|
||||
--$action-hook="${NOMAD_TASK_DIR}/nomad-var-dirsync-linux-{{ env "attr.cpu.arch" }} -root-var=secrets/certs write .lego" \
|
||||
EOH
|
||||
destination = "${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOH
|
||||
{{ with nomadVar "nomad/jobs/lego" -}}
|
||||
CF_DNS_API_TOKEN={{ .domain_lego_dns }}
|
||||
CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
|
||||
{{- end }}
|
||||
EOH
|
||||
destination = "secrets/cloudflare.env"
|
||||
env = true
|
||||
}
|
||||
|
||||
env = {
|
||||
NOMAD_ADDR = "unix:///secrets/api.sock"
|
||||
}
|
||||
|
||||
identity {
|
||||
env = true
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 50
|
||||
memory = 100
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
core/lego.tf
@ -1,23 +0,0 @@
resource "nomad_job" "lego" {
jobspec = file("${path.module}/lego.nomad")
}

resource "nomad_acl_policy" "secrets_certs_write" {
name = "secrets-certs-write"
description = "Write certs to secrets store"
rules_hcl = <<EOH
namespace "default" {
variables {
path "secrets/certs/*" {
capabilities = ["write", "read"]
}
path "secrets/certs" {
capabilities = ["write", "read"]
}
}
}
EOH
job_acl {
job_id = "lego/*"
}
}
@ -3,25 +3,17 @@ job "lldap" {
|
||||
type = "service"
|
||||
priority = 80
|
||||
|
||||
update {
|
||||
auto_revert = true
|
||||
}
|
||||
|
||||
group "lldap" {
|
||||
|
||||
network {
|
||||
mode = "bridge"
|
||||
|
||||
port "web" {
|
||||
%{~ if use_wesher ~}
|
||||
host_network = "wesher"
|
||||
%{~ endif ~}
|
||||
}
|
||||
|
||||
port "ldap" {
|
||||
%{~ if use_wesher ~}
|
||||
host_network = "wesher"
|
||||
%{~ endif ~}
|
||||
}
|
||||
|
||||
port "tls" {}
|
||||
@ -54,75 +46,47 @@ job "lldap" {
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "ghcr.io/lldap/lldap:v0.5"
|
||||
image = "nitnelave/lldap:latest"
|
||||
ports = ["ldap", "web"]
|
||||
args = ["run", "--config-file", "$${NOMAD_TASK_DIR}/lldap_config.toml"]
|
||||
args = ["run", "--config-file", "${NOMAD_SECRETS_DIR}/lldap_config.toml"]
|
||||
}
|
||||
|
||||
env = {
|
||||
"LLDAP_VERBOSE" = "true"
|
||||
"LLDAP_LDAP_PORT" = "$${NOMAD_PORT_ldap}"
|
||||
"LLDAP_HTTP_PORT" = "$${NOMAD_PORT_web}"
|
||||
"LLDAP_DATABASE_URL_FILE" = "$${NOMAD_SECRETS_DIR}/database_url.txt"
|
||||
"LLDAP_KEY_SEED_FILE" = "$${NOMAD_SECRETS_DIR}/key_seed.txt"
|
||||
"LLDAP_JWT_SECRET_FILE" = "$${NOMAD_SECRETS_DIR}/jwt_secret.txt"
|
||||
"LLDAP_USER_PASS_FILE" = "$${NOMAD_SECRETS_DIR}/user_pass.txt"
|
||||
"LLDAP_SMTP_OPTIONS__PASSWORD_FILE" = "$${NOMAD_SECRETS_DIR}/smtp_password.txt"
|
||||
"LLDAP_LDAP_PORT" = "${NOMAD_PORT_ldap}"
|
||||
"LLDAP_HTTP_PORT" = "${NOMAD_PORT_web}"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOH
|
||||
ldap_base_dn = "{{ with nomadVar "nomad/jobs" }}{{ .ldap_base_dn }}{{ end }}"
|
||||
|
||||
{{ with nomadVar "secrets/ldap" -}}
|
||||
{{ with nomadVar "nomad/jobs/lldap" -}}
|
||||
database_url = "mysql://{{ .db_user }}:{{ .db_pass }}@127.0.0.1:3306/{{ .db_name }}"
|
||||
key_seed = "{{ .key_seed }}"
|
||||
jwt_secret = "{{ .jwt_secret }}"
|
||||
|
||||
ldap_user_dn = "{{ .admin_user }}"
|
||||
ldap_user_email = "{{ .admin_email }}"
|
||||
{{ end -}}
|
||||
ldap_user_pass = "{{ .admin_password }}"
|
||||
|
||||
{{ with nomadVar "nomad/jobs/lldap" -}}
|
||||
[smtp_options]
|
||||
from = "{{ .smtp_from }}"
|
||||
reply_to = "{{ .smtp_reply_to }}"
|
||||
|
||||
enable_password_reset = true
|
||||
{{ end -}}
|
||||
{{ with nomadVar "secrets/smtp" -}}
|
||||
server = "{{ .server }}"
|
||||
port = {{ .port }}
|
||||
tls_required = {{ .tls.Value | toLower }}
|
||||
user = "{{ .user }}"
|
||||
{{- end }}
|
||||
|
||||
# TODO: Better access to SMTP creds using nomad ACLs
|
||||
{{ with nomadVar "nomad/jobs" -}}
|
||||
server = "{{ .smtp_server }}"
|
||||
port = {{ .smtp_port }}
|
||||
tls_required = {{ .smtp_tls.Value | toLower }}
|
||||
user = "{{ .smtp_user }}"
|
||||
password = "{{ .smtp_password }}"
|
||||
{{ end -}}
|
||||
EOH
|
||||
destination = "$${NOMAD_TASK_DIR}/lldap_config.toml"
|
||||
change_mode = "restart"
|
||||
}
|
||||
|
||||
template {
|
||||
data = "{{ with nomadVar \"nomad/jobs/lldap\" }}mysql://{{ .db_user }}:{{ .db_pass }}@127.0.0.1:3306/{{ .db_name }}{{ end }}"
|
||||
destination = "$${NOMAD_SECRETS_DIR}/database_url.txt"
|
||||
change_mode = "restart"
|
||||
}
|
||||
|
||||
template {
|
||||
data = "{{ with nomadVar \"nomad/jobs/lldap\" }}{{ .key_seed }}{{ end }}"
|
||||
destination = "$${NOMAD_SECRETS_DIR}/key_seed.txt"
|
||||
change_mode = "restart"
|
||||
}
|
||||
|
||||
template {
|
||||
data = "{{ with nomadVar \"nomad/jobs/lldap\" }}{{ .jwt_secret }}{{ end }}"
|
||||
destination = "$${NOMAD_SECRETS_DIR}/jwt_secret.txt"
|
||||
change_mode = "restart"
|
||||
}
|
||||
|
||||
template {
|
||||
data = "{{ with nomadVar \"secrets/ldap\" }}{{ .admin_password }}{{ end }}"
|
||||
destination = "$${NOMAD_SECRETS_DIR}/user_pass.txt"
|
||||
change_mode = "restart"
|
||||
}
|
||||
|
||||
template {
|
||||
data = "{{ with nomadVar \"secrets/smtp\" }}{{ .password }}{{ end }}"
|
||||
destination = "$${NOMAD_SECRETS_DIR}/smtp_password.txt"
|
||||
destination = "${NOMAD_SECRETS_DIR}/lldap_config.toml"
|
||||
change_mode = "restart"
|
||||
}
|
||||
|
||||
@ -145,10 +109,10 @@ user = "{{ .user }}"
|
||||
image = "mariadb:10"
|
||||
args = [
|
||||
"/usr/bin/timeout",
|
||||
"20m",
|
||||
"2m",
|
||||
"/bin/bash",
|
||||
"-c",
|
||||
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
|
||||
"until /usr/bin/mysql --defaults-extra-file=${NOMAD_SECRETS_DIR}/my.cnf < ${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
|
||||
]
|
||||
}
|
||||
|
||||
@ -158,11 +122,12 @@ user = "{{ .user }}"
|
||||
host=127.0.0.1
|
||||
port=3306
|
||||
user=root
|
||||
{{ with nomadVar "secrets/mysql" -}}
|
||||
# TODO: Use via lesser scoped access
|
||||
{{ with nomadVar "nomad/jobs/lldap/lldap/bootstrap" -}}
|
||||
password={{ .mysql_root_password }}
|
||||
{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/my.cnf"
|
||||
destination = "${NOMAD_SECRETS_DIR}/my.cnf"
|
||||
}
|
||||
|
||||
template {
|
||||
@ -181,7 +146,7 @@ GRANT ALL ON `{{ .db_name }}`.*
|
||||
SELECT 'NOOP';
|
||||
{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/bootstrap.sql"
|
||||
destination = "${NOMAD_SECRETS_DIR}/bootstrap.sql"
|
||||
}
|
||||
|
||||
resources {
|
||||
@ -199,9 +164,9 @@ SELECT 'NOOP';
|
||||
}
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
image = "alpine:3.17"
|
||||
ports = ["tls"]
|
||||
args = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
@ -209,6 +174,15 @@ SELECT 'NOOP';
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
||||
EOF
|
||||
destination = "${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
@ -219,7 +193,7 @@ delay = yes
|
||||
accept = {{ env "NOMAD_PORT_tls" }}
|
||||
connect = 127.0.0.1:{{ env "NOMAD_PORT_ldap" }}
|
||||
ciphers = PSK
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||
PSKsecrets = {{ env "NOMAD_TASK_DIR" }}/stunnel_psk.txt
|
||||
|
||||
[mysql_client]
|
||||
client = yes
|
||||
@ -229,23 +203,23 @@ connect = {{ .Address }}:{{ .Port }}
|
||||
{{- end }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
destination = "${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ range nomadVarList "secrets/ldap/allowed_psks" -}}
|
||||
{{ with nomadVar .Path }}{{ .psk }}{{ end }}
|
||||
{{ end -}}
|
||||
{{ with nomadVar "nomad/jobs/lldap/lldap/stunnel" -}}
|
||||
{{ .allowed_psks }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
|
||||
destination = "${NOMAD_TASK_DIR}/stunnel_psk.txt"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/mysql/allowed_psks/lldap" }}{{ .psk }}{{ end -}}
|
||||
{{- with nomadVar "nomad/jobs/lldap/lldap/stunnel" }}{{ .mysql_stunnel_psk }}{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
|
||||
destination = "${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
|
||||
}
|
||||
|
||||
}
|
@ -3,27 +3,31 @@ auth_enabled: false
|
||||
server:
|
||||
http_listen_port: 3100
|
||||
|
||||
common:
|
||||
ring:
|
||||
instance_addr: 127.0.0.1
|
||||
kvstore:
|
||||
store: inmemory
|
||||
replication_factor: 1
|
||||
path_prefix: /tmp/loki
|
||||
ingester:
|
||||
lifecycler:
|
||||
address: 127.0.0.1
|
||||
ring:
|
||||
kvstore:
|
||||
store: inmemory
|
||||
replication_factor: 1
|
||||
final_sleep: 0s
|
||||
chunk_idle_period: 5m
|
||||
chunk_retain_period: 30s
|
||||
max_transfer_retries: 0
|
||||
|
||||
schema_config:
|
||||
configs:
|
||||
- from: 2020-05-15
|
||||
store: boltdb-shipper
|
||||
- from: 2018-04-15
|
||||
store: boltdb
|
||||
object_store: filesystem
|
||||
schema: v11
|
||||
index:
|
||||
prefix: index_
|
||||
period: 24h
|
||||
period: 168h
|
||||
|
||||
storage_config:
|
||||
boltdb_shipper:
|
||||
active_index_directory: {{ env "NOMAD_TASK_DIR" }}/index
|
||||
boltdb:
|
||||
directory: {{ env "NOMAD_TASK_DIR" }}/index
|
||||
|
||||
filesystem:
|
||||
directory: {{ env "NOMAD_TASK_DIR" }}/chunks
|
||||
@ -34,8 +38,8 @@ limits_config:
|
||||
reject_old_samples_max_age: 168h
|
||||
|
||||
chunk_store_config:
|
||||
max_look_back_period: 168h
|
||||
max_look_back_period: 0s
|
||||
|
||||
table_manager:
|
||||
retention_deletes_enabled: true
|
||||
retention_period: 168h
|
||||
retention_deletes_enabled: false
|
||||
retention_period: 0s
|
||||
|
core/loki.tf
@ -1,24 +0,0 @@
module "loki" {
source = "../services/service"
detach = false

name = "loki"
image = "grafana/loki:2.8.7"
args = ["--config.file=$${NOMAD_TASK_DIR}/loki-config.yml"]

service_port = 3100
ingress = true
use_wesher = var.use_wesher
service_check = {
path = "/ready"
}

sticky_disk = true
templates = [
{
data = file("${path.module}/loki-config.yml")
dest = "loki-config.yml"
mount = false
}
]
}
core/main.tf
@ -1,14 +1,21 @@
|
||||
module "blocky" {
|
||||
source = "./blocky"
|
||||
|
||||
use_wesher = var.use_wesher
|
||||
|
||||
base_hostname = var.base_hostname
|
||||
# Not in this module
|
||||
# depends_on = [module.databases]
|
||||
}
|
||||
|
||||
module "traefik" {
|
||||
source = "./traefik"
|
||||
|
||||
base_hostname = var.base_hostname
|
||||
}
|
||||
|
||||
module "metrics" {
|
||||
source = "./metrics"
|
||||
# Not in this module
|
||||
# depends_on = [module.databases]
|
||||
}
|
||||
|
||||
resource "nomad_job" "nomad-client-stalker" {
|
||||
@ -16,12 +23,180 @@ resource "nomad_job" "nomad-client-stalker" {
|
||||
jobspec = file("${path.module}/nomad-client-stalker.nomad")
|
||||
}
|
||||
|
||||
module "loki" {
|
||||
source = "../services/service"
|
||||
|
||||
name = "loki"
|
||||
image = "grafana/loki:2.2.1"
|
||||
args = ["--config.file=$${NOMAD_TASK_DIR}/loki-config.yml"]
|
||||
service_port = 3100
|
||||
ingress = true
|
||||
sticky_disk = true
|
||||
# healthcheck = "/ready"
|
||||
templates = [
|
||||
{
|
||||
data = file("${path.module}/loki-config.yml")
|
||||
dest = "loki-config.yml"
|
||||
mount = false
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
resource "nomad_job" "syslog-ng" {
|
||||
jobspec = file("${path.module}/syslogng.nomad")
|
||||
|
||||
depends_on = [module.loki]
|
||||
}
|
||||
|
||||
resource "nomad_job" "ddclient" {
|
||||
jobspec = file("${path.module}/ddclient.nomad")
|
||||
}
|
||||
|
||||
resource "nomad_job" "lldap" {
|
||||
jobspec = file("${path.module}/lldap.nomad")
|
||||
}
|
||||
|
||||
module "authelia" {
|
||||
source = "../services/service"
|
||||
|
||||
name = "authelia"
|
||||
instance_count = 2
|
||||
priority = 70
|
||||
image = "authelia/authelia:latest"
|
||||
args = ["--config", "$${NOMAD_TASK_DIR}/authelia.yml"]
|
||||
ingress = true
|
||||
service_port = 9091
|
||||
service_port_static = true
|
||||
# metrics_port = 9959
|
||||
|
||||
env = {
|
||||
AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE = "$${NOMAD_SECRETS_DIR}/ldap_password.txt"
|
||||
AUTHELIA_JWT_SECRET_FILE = "$${NOMAD_SECRETS_DIR}/jwt_secret.txt"
|
||||
AUTHELIA_SESSION_SECRET_FILE = "$${NOMAD_SECRETS_DIR}/session_secret.txt"
|
||||
AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE = "$${NOMAD_SECRETS_DIR}/storage_encryption_key.txt"
|
||||
AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE = "$${NOMAD_SECRETS_DIR}/mysql_password.txt"
|
||||
AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE = "$${NOMAD_SECRETS_DIR}/smtp_password.txt"
|
||||
AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE = "$${NOMAD_SECRETS_DIR}/oidc_hmac_secret.txt"
|
||||
AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE = "$${NOMAD_SECRETS_DIR}/oidc_issuer_private_key.txt"
|
||||
# AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_CERTIFICATE_CHAIN_FILE = "$${NOMAD_SECRETS_DIR}/oidc_issuer_certificate_chain.txt"
|
||||
}
|
||||
|
||||
use_mysql = true
|
||||
use_ldap = true
|
||||
use_redis = true
|
||||
mysql_bootstrap = {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
service_tags = [
|
||||
# Configure traefik to add this middleware
|
||||
"traefik.http.middlewares.authelia.forwardAuth.address=http://authelia.nomad:9091/api/verify?rd=https%3A%2F%2Fauthelia.thefij.rocks%2F",
|
||||
"traefik.http.middlewares.authelia.forwardAuth.trustForwardHeader=true",
|
||||
"traefik.http.middlewares.authelia.forwardAuth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email",
|
||||
"traefik.http.middlewares.authelia-basic.forwardAuth.address=http://authelia.nomad:9091/api/verify?auth=basic",
|
||||
"traefik.http.middlewares.authelia-basic.forwardAuth.trustForwardHeader=true",
|
||||
"traefik.http.middlewares.authelia-basic.forwardAuth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email",
|
||||
]
|
||||
|
||||
templates = [
|
||||
{
|
||||
data = file("${path.module}/authelia.yml")
|
||||
dest = "authelia.yml"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .lldap_admin_password }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "ldap_password.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .jwt_secret }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "jwt_secret.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .session_secret }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "session_secret.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .storage_encryption_key }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "storage_encryption_key.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .db_pass }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "mysql_password.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .oidc_hmac_secret }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "oidc_hmac_secret.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .oidc_issuer_private_key }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "oidc_issuer_private_key.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .oidc_issuer_certificate_chain }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "oidc_issuer_certificate_chain.txt"
|
||||
mount = false
|
||||
},
|
||||
{
|
||||
data = "{{ with nomadVar \"nomad/jobs\" }}{{ .smtp_password }}{{ end }}"
|
||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||
dest = "smtp_password.txt"
|
||||
mount = false
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
resource "nomad_acl_auth_method" "nomad_authelia" {
|
||||
name = "authelia"
|
||||
type = "OIDC"
|
||||
token_locality = "global"
|
||||
max_token_ttl = "1h0m0s"
|
||||
default = true
|
||||
|
||||
config {
|
||||
oidc_discovery_url = "https://authelia.thefij.rocks"
|
||||
oidc_client_id = "nomad"
|
||||
oidc_client_secret = yamldecode(file("${path.module}/../ansible_playbooks/vars/nomad_vars.yml"))["nomad/oidc"]["secret"]
|
||||
bound_audiences = ["nomad"]
|
||||
oidc_scopes = [
|
||||
"groups",
|
||||
"openid",
|
||||
]
|
||||
allowed_redirect_uris = [
|
||||
"https://nomad.thefij.rocks/oidc/callback",
|
||||
"https://nomad.thefij.rocks/ui/settings/tokens",
|
||||
]
|
||||
list_claim_mappings = {
|
||||
"groups" : "roles"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_binding_rule" "nomad_authelia_admin" {
|
||||
description = "engineering rule"
|
||||
auth_method = nomad_acl_auth_method.nomad_authelia.name
|
||||
selector = "\"nomad-deploy\" in list.roles"
|
||||
bind_type = "role"
|
||||
bind_name = "admin" # acls.nomad_acl_role.admin.name
|
||||
}
|
||||
|
||||
resource "nomad_acl_binding_rule" "nomad_authelia_deploy" {
|
||||
description = "engineering rule"
|
||||
auth_method = nomad_acl_auth_method.nomad_authelia.name
|
||||
selector = "\"nomad-deploy\" in list.roles"
|
||||
bind_type = "role"
|
||||
bind_name = "deploy" # acls.nomad_acl_role.deploy.name
|
||||
}
|
||||
|
core/metrics/.terraform.lock.hcl (new generated file)
@ -0,0 +1,40 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/consul" {
|
||||
version = "2.15.0"
|
||||
hashes = [
|
||||
"h1:o+Su3YqeOkHgf86GEArIVDZfaZQphYFjAOwpi/b0bzs=",
|
||||
"h1:tAb2gwW+oZ8/t2j7lExdqpNrxmaWsHbyA2crFWClPb0=",
|
||||
"zh:0bd2a9873099d89bd52e9eee623dd20ccb275d1e2f750da229a53a4d5b23450c",
|
||||
"zh:1c9f87d4d97b2c61d006c0bef159d61d2a661a103025f8276ebbeb000129f931",
|
||||
"zh:25b73a34115255c464be10a53f2510c4a1db958a71be31974d30654d5472e624",
|
||||
"zh:32fa31329731db2bf4b7d0f09096416ca146f05b58f4482bbd4ee0f28cefbbcc",
|
||||
"zh:59136b73d3abe7cc5b06d9e12d123ad21298ca86ed49a4060a3cd7c2a28a74a1",
|
||||
"zh:a191f3210773ca25c543a92f2d392b85e6a053d596293655b1f25b33eb843b4c",
|
||||
"zh:b8b6033cf0687eadc1099f11d9fb2ca9429ff40c2d85bd6cb047c0f6bc5d5d8d",
|
||||
"zh:bb7d67ed28aa9b28fc5154161af003383f940b2beda0d4577857cad700f39cd1",
|
||||
"zh:be615288f59327b975532a1999deab60a022e6819fe80e5a32526155210ecbba",
|
||||
"zh:de1e3d5c34eef87eb301e74717754babb6dc8e19e3a964919e1165c5a076a719",
|
||||
"zh:eb8c61b20d8ce2bfff9f735ca8456a0d6368af13aa1f43866f61c70f88cc491c",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "1.4.16"
|
||||
hashes = [
|
||||
"h1:PQxNPNmMVOErxryTWIJwr22k95DTSODmgRylqjc2TjI=",
|
||||
"h1:tyfjD/maKzb0RxxD9KWgLnkJu9lnYziYsQgGw85Giz8=",
|
||||
"zh:0d4fbb7030d9caac3b123e60afa44f50c83cc2a983e1866aec7f30414abe7b0e",
|
||||
"zh:0db080228e07c72d6d8ca8c45249d6f97cd0189fce82a77abbdcd49a52e57572",
|
||||
"zh:0df88393271078533a217654b96f0672c60eb59570d72e6aefcb839eea87a7a0",
|
||||
"zh:2883b335bb6044b0db6a00e602d6926c047c7f330294a73a90d089f98b24d084",
|
||||
"zh:390158d928009a041b3a182bdd82376b50530805ae92be2b84ed7c3b0fa902a0",
|
||||
"zh:7169b8f8df4b8e9659c49043848fd5f7f8473d0471f67815e8b04980f827f5ef",
|
||||
"zh:9417ee1383b1edd137024882d7035be4dca51fb4f725ca00ed87729086ec1755",
|
||||
"zh:a22910b5a29eeab5610350700b4899267c1b09b66cf21f7e4d06afc61d425800",
|
||||
"zh:a6185c9cd7aa458cd81861058ba568b6411fbac344373a20155e20256f4a7557",
|
||||
"zh:b6260ca9f034df1b47905b4e2a9c33b67dbf77224a694d5b10fb09ae92ffad4c",
|
||||
"zh:d87c12a6a7768f2b6c2a59495c7dc00f9ecc52b1b868331d4c284f791e278a1e",
|
||||
]
|
||||
}
|
@ -1,24 +1,14 @@
|
||||
job "exporters" {
|
||||
datacenters = ["dc1"]
|
||||
type = "service"
|
||||
priority = 55
|
||||
|
||||
constraint {
|
||||
distinct_hosts = true
|
||||
}
|
||||
type = "system"
|
||||
|
||||
group "promtail" {
|
||||
# TODO: This must be updated to match the number of servers (possibly grabbed from TF)
# I am moving away from `system` jobs because of https://github.com/hashicorp/nomad/issues/1202
|
||||
count = 2
|
||||
|
||||
network {
|
||||
mode = "bridge"
|
||||
|
||||
port "promtail" {
|
||||
%{~ if use_wesher ~}
|
||||
host_network = "wesher"
|
||||
%{~ endif ~}
|
||||
to = 9080
|
||||
}
|
||||
}
|
||||
@ -29,8 +19,8 @@ job "exporters" {
|
||||
port = "promtail"
|
||||
|
||||
meta {
|
||||
nomad_dc = "$${NOMAD_DC}"
|
||||
nomad_node_name = "$${node.unique.name}"
|
||||
nomad_dc = "${NOMAD_DC}"
|
||||
nomad_node_name = "${node.unique.name}"
|
||||
}
|
||||
|
||||
tags = [
|
||||
@ -41,9 +31,15 @@ job "exporters" {
|
||||
task "promtail" {
|
||||
driver = "docker"
|
||||
|
||||
meta = {
|
||||
"diun.sort_tags" = "semver"
|
||||
"diun.watch_repo" = true
|
||||
"diun.include_tags" = "^[0-9]+\\.[0-9]+\\.[0-9]+$"
|
||||
}
|
||||
|
||||
config {
|
||||
image = "grafana/promtail:3.3.0"
|
||||
args = ["-config.file=$${NOMAD_TASK_DIR}/promtail.yml"]
|
||||
image = "grafana/promtail:2.7.1"
|
||||
args = ["-config.file=${NOMAD_TASK_DIR}/promtail.yml"]
|
||||
ports = ["promtail"]
|
||||
|
||||
# Bind mount host machine-id and log directories
|
||||
@ -131,7 +127,7 @@ scrape_configs:
|
||||
- source_labels: ['__journal_com_hashicorp_nomad_task_name']
|
||||
target_label: nomad_task_name
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/promtail.yml"
|
||||
destination = "${NOMAD_TASK_DIR}/promtail.yml"
|
||||
}
|
||||
|
||||
resources {
|
@ -8,9 +8,7 @@ job "grafana" {
|
||||
mode = "bridge"
|
||||
|
||||
port "web" {
|
||||
%{~ if use_wesher ~}
|
||||
host_network = "wesher"
|
||||
%{~ endif ~}
|
||||
to = 3000
|
||||
}
|
||||
}
|
||||
@ -28,6 +26,7 @@ job "grafana" {
|
||||
tags = [
|
||||
"traefik.enable=true",
|
||||
"traefik.http.routers.grafana.entryPoints=websecure",
|
||||
# "traefik.http.routers.grafana.middlewares=authelia@nomad",
|
||||
]
|
||||
}
|
||||
|
||||
@ -40,8 +39,8 @@ job "grafana" {
|
||||
}
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
image = "alpine:3.17"
|
||||
args = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
@ -49,6 +48,15 @@ job "grafana" {
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
@ -66,15 +74,17 @@ PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||
destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
}
|
||||
|
||||
# TODO: Get psk for backup jobs despite multiple job declarations
|
||||
# Probably should use variable ACLs to grant each node job to this path
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/mysql/allowed_psks/grafana" }}{{ .psk }}{{ end -}}
|
||||
{{- with nomadVar "nomad/jobs/grafana/grafana/stunnel" }}{{ .mysql_stunnel_psk }}{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
|
||||
}
|
||||
}
|
||||
|
||||
task "mysql-bootstrap" {
|
||||
task "grafana-bootstrap" {
|
||||
driver = "docker"
|
||||
|
||||
lifecycle {
|
||||
@ -86,10 +96,10 @@ EOF
|
||||
image = "mariadb:10"
|
||||
args = [
|
||||
"/usr/bin/timeout",
|
||||
"20m",
|
||||
"2m",
|
||||
"/bin/bash",
|
||||
"-c",
|
||||
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do echo 'Retry in 10s'; sleep 10; done",
|
||||
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
|
||||
]
|
||||
}
|
||||
|
||||
@ -99,7 +109,7 @@ EOF
|
||||
host=127.0.0.1
|
||||
port=3306
|
||||
user=root
|
||||
{{ with nomadVar "secrets/mysql" -}}
|
||||
{{ with nomadVar "nomad/jobs" -}}
|
||||
password={{ .mysql_root_password }}
|
||||
{{ end -}}
|
||||
EOF
|
||||
@ -133,28 +143,26 @@ SELECT 'NOOP';
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "grafana/grafana:10.0.10"
|
||||
args = ["--config", "$${NOMAD_ALLOC_DIR}/config/grafana.ini"]
|
||||
image = "grafana/grafana:9.4.2"
|
||||
ports = ["web"]
|
||||
}
|
||||
|
||||
env = {
|
||||
"GF_INSTALL_PLUGINS" = "grafana-clock-panel,grafana-piechart-panel,grafana-polystat-panel,natel-discrete-panel",
|
||||
"GF_PATHS_CONFIG" = "$${NOMAD_ALLOC_DIR}/config/grafana.ini",
|
||||
"GF_PATHS_PROVISIONING" = "$${NOMAD_ALLOC_DIR}/config/provisioning",
|
||||
"GF_PATHS_CONFIG" = "$${NOMAD_ALLOC_DIR}/config/grafana.ini"
|
||||
"GF_PATHS_PROVISIONING" = "$${NOMAD_ALLOC_DIR}/config/provisioning"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ with nomadVar "secrets/smtp" -}}
|
||||
GF_SMTP_USER={{ .user }}
|
||||
GF_SMTP_PASSWORD={{ .password }}
|
||||
{{ end -}}
|
||||
{{ with nomadVar "nomad/jobs/grafana" -}}
|
||||
GF_SECURITY_ADMIN_PASSWORD={{ .admin_pw }}
|
||||
GF_SMTP_USER={{ .smtp_user }}
|
||||
GF_SMTP_PASSWORD={{ .smtp_password }}
|
||||
GF_EXTERNAL_IMAGE_STORAGE_S3_ACCESS_KEY={{ .minio_access_key }}
|
||||
GF_EXTERNAL_IMAGE_STORAGE_S3_SECRET_KEY={{ .minio_secret_key }}
|
||||
GRAFANA_ALERT_EMAIL_ADDRESSES={{ .alert_email_addresses }}
|
||||
GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET={{ .oidc_secret }}
|
||||
{{ if .db_name -}}
|
||||
# Database storage
|
||||
GF_DATABASE_TYPE=mysql
|
||||
@ -162,14 +170,10 @@ GF_DATABASE_HOST=127.0.0.1:3306
|
||||
GF_DATABASE_NAME={{ .db_name }}
|
||||
GF_DATABASE_USER={{ .db_user }}
|
||||
GF_DATABASE_PASSWORD={{ .db_pass }}
|
||||
{{ end -}}
|
||||
{{- end }}
|
||||
SLACK_BOT_URL={{ .slack_bot_url }}
|
||||
SLACK_BOT_TOKEN={{ .slack_bot_token }}
|
||||
SLACK_HOOK_URL={{ .slack_hook_url }}
|
||||
{{ end -}}
|
||||
{{ with nomadVar "secrets/authelia/grafana" -}}
|
||||
GF_AUTH_GENERIC_OAUTH_CLIENT_ID={{ .client_id }}
|
||||
GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET={{ .secret }}
|
||||
{{ end -}}
|
||||
EOF
|
||||
env = true
|
||||
@ -191,17 +195,13 @@ GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET={{ .secret }}
|
||||
}
|
||||
|
||||
config {
|
||||
image = "alpine:3.17"
|
||||
image = "alpine"
|
||||
args = ["$${NOMAD_TASK_DIR}/startup.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 50
|
||||
memory = 50
|
||||
}
|
||||
|
||||
action "reloadnow" {
|
||||
command = "/local/reload_config.sh"
|
||||
cpu = 100
|
||||
memory = 100
|
||||
}
|
||||
|
||||
env = {
|
||||
@ -263,7 +263,7 @@ ${file(join("/", [module_path, "grafana", config_file]))}
|
||||
# Set owner to grafana uid
|
||||
# uid = 472
|
||||
# Change template delimiter for dashboard files that use JSON and contain double curly braces and square brackets
%{ if endswith(config_file, ".json") ~}
%{ if length(regexall("dashboard", config_file)) > 0 ~}
left_delimiter = "<<<<"
right_delimiter = ">>>>"
%{ endif }
@ -279,11 +279,6 @@ ${file(join("/", [module_path, "grafana", config_file]))}
task "grafana-image-renderer" {
driver = "docker"

constraint {
attribute = "$${attr.cpu.arch}"
value = "amd64"
}

config {
image = "grafana/grafana-image-renderer:3.6.1"
ports = ["renderer"]
@ -20,8 +20,8 @@ data = /var/lib/grafana
# Directory where grafana will automatically scan and look for plugins
;plugins = /var/lib/grafana/plugins

# folder that contains PROVISIONING config files that grafana will apply on startup and while running.
provisioning = from_env
# folder that contains provisioning config files that grafana will apply on startup and while running.
; provisioning = /etc/grafana/provisioning

#################################### Server ####################################
[server]
@ -43,7 +43,7 @@ provisioning = from_env

# The full public facing url you use in browser, used for redirects and emails
# If you use reverse proxy and sub path specify full url (with sub path)
root_url = https://grafana.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}
root_url = https://grafana.thefij.rocks

# Log web requests
;router_logging = false
@ -261,19 +261,15 @@ log_queries =
enabled = true
name = Authelia
;allow_sign_up = true
client_id = from_env
client_id = grafana
client_secret = from_env
scopes = openid profile email groups
auth_url = https://authelia.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}/api/oidc/authorization
token_url = https://authelia.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}/api/oidc/token
api_url = https://authelia.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}/api/oidc/userinfo
auth_url = https://authelia.thefij.rocks/api/oidc/authorization
token_url = https://authelia.thefij.rocks/api/oidc/token
api_url = https://authelia.thefij.rocks/api/oidc/userinfo
login_attribute_path = preferred_username
groups_attribute_path = groups
name_attribute_path = name
# Role attribute path is not working
role_attribute_path = contains(groups[*], 'admin') && 'Admin' || contains(groups[*], 'grafana-admin') && 'Admin' || contains(groups[*], 'grafana-editor') && 'Editor' || contains(groups[*], 'developer') && 'Editor'
allow_assign_grafana_admin = true
skip_org_role_sync = true
use_pkce = true

;team_ids =
@ -441,7 +437,7 @@ enabled = true
provider = s3

[external_image_storage.s3]
endpoint = https://minio.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}
endpoint = https://minio.thefij.rocks
bucket = grafana-images
region = us-east-1
path_style_access = true
@ -104,7 +104,7 @@
|
||||
"uid": "Prometheus"
|
||||
},
|
||||
"exemplar": false,
|
||||
"expr": "sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
|
||||
"expr": "sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
|
||||
"format": "table",
|
||||
"instant": true,
|
||||
"interval": "",
|
||||
@ -458,7 +458,7 @@
|
||||
"uid": "Prometheus"
|
||||
},
|
||||
"exemplar": true,
|
||||
"expr": "sum(blocky_blacklist_cache) / sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
|
||||
"expr": "sum(blocky_blacklist_cache) / sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
|
||||
"format": "table",
|
||||
"instant": false,
|
||||
"interval": "",
|
||||
@ -533,7 +533,7 @@
|
||||
"uid": "Prometheus"
|
||||
},
|
||||
"exemplar": true,
|
||||
"expr": "sum(go_memstats_sys_bytes{job=\"exporters\", consul_service=\"blocky-api\"})/sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
|
||||
"expr": "sum(go_memstats_sys_bytes{job=\"exporters\", consul_service=\"blocky-api\"})/sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
|
||||
"format": "table",
|
||||
"instant": false,
|
||||
"interval": "",
|
||||
@ -753,7 +753,7 @@
|
||||
"uid": "Prometheus"
|
||||
},
|
||||
"exemplar": true,
|
||||
"expr": "sum(blocky_cache_entry_count)/ sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
|
||||
"expr": "sum(blocky_cache_entry_count)/ sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
|
||||
"format": "table",
|
||||
"instant": false,
|
||||
"interval": "",
|
||||
@ -1162,7 +1162,7 @@
|
||||
"uid": "Prometheus"
|
||||
},
|
||||
"exemplar": false,
|
||||
"expr": "sum(time() -blocky_last_list_group_refresh)/ sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
|
||||
"expr": "sum(time() -blocky_last_list_group_refresh)/ sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
|
||||
"format": "table",
|
||||
"instant": true,
|
||||
"interval": "",
|
||||
@ -1224,7 +1224,7 @@
|
||||
"uid": "Prometheus"
|
||||
},
|
||||
"exemplar": true,
|
||||
"expr": "sum(blocky_prefetch_domain_name_cache_count)/ sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
|
||||
"expr": "sum(blocky_prefetch_domain_name_cache_count)/ sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
|
||||
"format": "table",
|
||||
"interval": "",
|
||||
"legendFormat": "",
|
@ -0,0 +1,783 @@
|
||||
{
|
||||
"__inputs": [
|
||||
{
|
||||
"name": "DS_PROMETHEUS",
|
||||
"label": "Prometheus",
|
||||
"description": "",
|
||||
"type": "datasource",
|
||||
"pluginId": "prometheus",
|
||||
"pluginName": "Prometheus"
|
||||
}
|
||||
],
|
||||
"__requires": [
|
||||
{
|
||||
"type": "grafana",
|
||||
"id": "grafana",
|
||||
"name": "Grafana",
|
||||
"version": "7.5.5"
|
||||
},
|
||||
{
|
||||
"type": "panel",
|
||||
"id": "graph",
|
||||
"name": "Graph",
|
||||
"version": ""
|
||||
},
|
||||
{
|
||||
"type": "panel",
|
||||
"id": "piechart",
|
||||
"name": "Pie chart v2",
|
||||
"version": ""
|
||||
},
|
||||
{
|
||||
"type": "datasource",
|
||||
"id": "prometheus",
|
||||
"name": "Prometheus",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
{
|
||||
"type": "panel",
|
||||
"id": "singlestat",
|
||||
"name": "Singlestat",
|
||||
"version": ""
|
||||
}
|
||||
],
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"description": "Traefik dashboard prometheus",
|
||||
"editable": true,
|
||||
"gnetId": 4475,
|
||||
"graphTooltip": 0,
|
||||
"id": null,
|
||||
"iteration": 1620932097756,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"id": 10,
|
||||
"title": "$backend stats",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"cacheTimeout": null,
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"decimals": 0,
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 1
|
||||
},
|
||||
"id": 2,
|
||||
"interval": null,
|
||||
"links": [],
|
||||
"maxDataPoints": 3,
|
||||
"options": {
|
||||
"displayLabels": [],
|
||||
"legend": {
|
||||
"calcs": [],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"values": [
|
||||
"value",
|
||||
"percent"
|
||||
]
|
||||
},
|
||||
"pieType": "pie",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"text": {}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"exemplar": true,
|
||||
"expr": "traefik_service_requests_total{service=\"$service\"}",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{method}} : {{code}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "$service return code",
|
||||
"type": "piechart"
|
||||
},
|
||||
{
|
||||
"cacheTimeout": null,
|
||||
"colorBackground": false,
|
||||
"colorValue": false,
|
||||
"colors": [
|
||||
"#299c46",
|
||||
"rgba(237, 129, 40, 0.89)",
|
||||
"#d44a3a"
|
||||
],
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {},
|
||||
"overrides": []
|
||||
},
|
||||
"format": "ms",
|
||||
"gauge": {
|
||||
"maxValue": 100,
|
||||
"minValue": 0,
|
||||
"show": false,
|
||||
"thresholdLabels": false,
|
||||
"thresholdMarkers": true
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 1
|
||||
},
|
||||
"id": 4,
|
||||
"interval": null,
|
||||
"links": [],
|
||||
"mappingType": 1,
|
||||
"mappingTypes": [
|
||||
{
|
||||
"name": "value to text",
|
||||
"value": 1
|
||||
},
|
||||
{
|
||||
"name": "range to text",
|
||||
"value": 2
|
||||
}
|
||||
],
|
||||
"maxDataPoints": 100,
|
||||
"nullPointMode": "connected",
|
||||
"nullText": null,
|
||||
"postfix": "",
|
||||
"postfixFontSize": "50%",
|
||||
"prefix": "",
|
||||
"prefixFontSize": "50%",
|
||||
"rangeMaps": [
|
||||
{
|
||||
"from": "null",
|
||||
"text": "N/A",
|
||||
"to": "null"
|
||||
}
|
||||
],
|
||||
"sparkline": {
|
||||
"fillColor": "rgba(31, 118, 189, 0.18)",
|
||||
"full": false,
|
||||
"lineColor": "rgb(31, 120, 193)",
|
||||
"show": true
|
||||
},
|
||||
"tableColumn": "",
|
||||
"targets": [
|
||||
{
|
||||
"exemplar": true,
|
||||
"expr": "sum(traefik_service_request_duration_seconds_sum{service=\"$service\"}) / sum(traefik_service_requests_total{service=\"$service\"}) * 1000",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": "",
|
||||
"title": "$service response time",
|
||||
"type": "singlestat",
|
||||
"valueFontSize": "80%",
|
||||
"valueMaps": [
|
||||
{
|
||||
"op": "=",
|
||||
"text": "N/A",
|
||||
"value": "null"
|
||||
}
|
||||
],
|
||||
"valueName": "avg"
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 8
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 3,
|
||||
"legend": {
|
||||
"alignAsTable": true,
|
||||
"avg": true,
|
||||
"current": false,
|
||||
"max": true,
|
||||
"min": true,
|
||||
"rightSide": true,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": true
|
||||
},
|
||||
"lines": false,
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"alertThreshold": true
|
||||
},
|
||||
"percentage": false,
|
||||
"pluginVersion": "7.5.5",
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"exemplar": true,
|
||||
"expr": "sum(rate(traefik_service_requests_total{service=\"$service\"}[5m]))",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "Total requests $service",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Total requests over 5min $service",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 15
|
||||
},
|
||||
"id": 12,
|
||||
"panels": [],
|
||||
"title": "Global stats",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 16
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 5,
|
||||
"legend": {
|
||||
"alignAsTable": true,
|
||||
"avg": false,
|
||||
"current": true,
|
||||
"max": true,
|
||||
"min": true,
|
||||
"rightSide": true,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": true
|
||||
},
|
||||
"lines": false,
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"alertThreshold": true
|
||||
},
|
||||
"percentage": false,
|
||||
"pluginVersion": "7.5.5",
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": true,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(traefik_entrypoint_requests_total{entrypoint=~\"$entrypoint\",code=\"200\"}[5m])",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{method}} : {{code}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Status code 200 over 5min",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 16
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 6,
|
||||
"legend": {
|
||||
"alignAsTable": true,
|
||||
"avg": false,
|
||||
"current": true,
|
||||
"max": true,
|
||||
"min": true,
|
||||
"rightSide": true,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": true
|
||||
},
|
||||
"lines": false,
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"alertThreshold": true
|
||||
},
|
||||
"percentage": false,
|
||||
"pluginVersion": "7.5.5",
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": true,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(traefik_entrypoint_requests_total{entrypoint=~\"$entrypoint\",code!=\"200\"}[5m])",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{ method }} : {{code}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Others status code over 5min",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"cacheTimeout": null,
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"decimals": 0,
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 23
|
||||
},
|
||||
"id": 7,
|
||||
"interval": null,
|
||||
"links": [],
|
||||
"maxDataPoints": 3,
|
||||
"options": {
|
||||
"displayLabels": [],
|
||||
"legend": {
|
||||
"calcs": [],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"values": [
|
||||
"value"
|
||||
]
|
||||
},
|
||||
"pieType": "pie",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"sum"
|
||||
],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"text": {}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"exemplar": true,
|
||||
"expr": "sum(rate(traefik_service_requests_total[5m])) by (service) ",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{ service }}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Requests by service",
|
||||
"type": "piechart"
|
||||
},
|
||||
{
|
||||
"cacheTimeout": null,
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"decimals": 0,
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 23
|
||||
},
|
||||
"id": 8,
|
||||
"interval": null,
|
||||
"links": [],
|
||||
"maxDataPoints": 3,
|
||||
"options": {
|
||||
"displayLabels": [],
|
||||
"legend": {
|
||||
"calcs": [],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"values": [
|
||||
"value"
|
||||
]
|
||||
},
|
||||
"pieType": "pie",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"sum"
|
||||
],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"text": {}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"exemplar": true,
|
||||
"expr": "sum(rate(traefik_entrypoint_requests_total{entrypoint =~ \"$entrypoint\"}[5m])) by (entrypoint) ",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{ entrypoint }}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Requests by protocol",
|
||||
"type": "piechart"
|
||||
}
|
||||
],
|
||||
"schemaVersion": 27,
|
||||
"style": "dark",
|
||||
"tags": [
|
||||
"traefik",
|
||||
"prometheus"
|
||||
],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allValue": null,
|
||||
"current": {},
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"definition": "label_values(service)",
|
||||
"description": null,
|
||||
"error": null,
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"name": "service",
|
||||
"options": [],
|
||||
"query": {
|
||||
"query": "label_values(service)",
|
||||
"refId": "StandardVariableQuery"
|
||||
},
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
},
|
||||
{
|
||||
"allValue": null,
|
||||
"current": {},
|
||||
"datasource": "${DS_PROMETHEUS}",
|
||||
"definition": "",
|
||||
"description": null,
|
||||
"error": null,
|
||||
"hide": 0,
|
||||
"includeAll": true,
|
||||
"label": null,
|
||||
"multi": true,
|
||||
"name": "entrypoint",
|
||||
"options": [],
|
||||
"query": {
|
||||
"query": "label_values(entrypoint)",
|
||||
"refId": "Prometheus-entrypoint-Variable-Query"
|
||||
},
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-1h",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"refresh_intervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "",
|
||||
"title": "Traefik",
|
||||
"uid": "qPdAviJmz",
|
||||
"version": 10
|
||||
}
@ -5,4 +5,4 @@ providers:
type: file
disableDeletion: false
options:
path: {{ env "NOMAD_ALLOC_DIR" }}/config/provisioning/dashboards/default
path: /etc/grafana/provisioning/dashboards/default
core/metrics/metrics.tf (new file)
@ -0,0 +1,27 @@
resource "nomad_job" "exporters" {
hcl2 {
enabled = true
}

jobspec = file("${path.module}/exporters.nomad")
}

resource "nomad_job" "prometheus" {
hcl2 {
enabled = true
}

jobspec = file("${path.module}/prometheus.nomad")
}

resource "nomad_job" "grafana" {
hcl2 {
enabled = true
}

jobspec = templatefile("${path.module}/grafana.nomad", {
module_path = path.module
})

depends_on = [nomad_job.prometheus]
}
@ -8,16 +8,12 @@ job "prometheus" {
mode = "bridge"

port "web" {
%{~ if use_wesher ~}
host_network = "wesher"
%{~ endif ~}
to = 9090
}

port "pushgateway" {
%{~ if use_wesher ~}
host_network = "wesher"
%{~ endif ~}
static = 9091
}
}
@ -37,36 +33,12 @@ job "prometheus" {
"traefik.enable=true",
"traefik.http.routers.prometheus.entryPoints=websecure",
]

check {
type = "http"
path = "/-/healthy"
interval = "10s"
timeout = "3s"

check_restart {
limit = 3
grace = "5m"
}
}
}

service {
name = "pushgateway"
provider = "nomad"
port = "pushgateway"

check {
type = "http"
path = "/-/healthy"
interval = "10s"
timeout = "3s"

check_restart {
limit = 3
grace = "5m"
}
}
}

task "prometheus" {
@ -76,8 +48,8 @@ job "prometheus" {
image = "prom/prometheus:v2.43.0"
ports = ["web"]
args = [
"--config.file=$${NOMAD_TASK_DIR}/prometheus.yml",
"--storage.tsdb.path=$${NOMAD_ALLOC_DIR}/data/tsdb",
"--config.file=${NOMAD_TASK_DIR}/prometheus.yml",
"--storage.tsdb.path=${NOMAD_ALLOC_DIR}/data/tsdb",
"--web.listen-address=0.0.0.0:9090",
"--web.console.libraries=/usr/share/prometheus/console_libraries",
"--web.console.templates=/usr/share/prometheus/consoles",
@ -140,7 +112,7 @@ scrape_configs:
EOF
change_mode = "signal"
change_signal = "SIGHUP"
destination = "$${NOMAD_TASK_DIR}/prometheus.yml"
destination = "${NOMAD_TASK_DIR}/prometheus.yml"
}

resources {
@ -156,7 +128,7 @@ scrape_configs:
image = "prom/pushgateway"
ports = ["pushgateway"]
args = [
"--persistence.file=$${NOMAD_ALLOC_DIR}/pushgateway-persistence",
"--persistence.file=${NOMAD_ALLOC_DIR}/pushgateway-persistence",
]
}
@ -24,8 +24,7 @@ job "nomad-client-stalker" {

resources {
cpu = 10
memory = 15
memory_max = 30
memory = 10
}
}
}
core/oidc_client/.terraform.lock.hcl (generated)
@ -1,40 +0,0 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.3.1"
|
||||
hashes = [
|
||||
"h1:lMueBNB2GJ/a5rweL9NPybwVfDH/Q1s+rQvt5Y+kuYs=",
|
||||
"zh:1e7893a3fbebff171bcc5581b70a16eea33193c7e9dd73402ba5c04b7202f0bb",
|
||||
"zh:252cfd3fee4811c83bc74406ba1bc1bbb83d6de20e50a86f93737f8f86864171",
|
||||
"zh:387a7140be6dfa3f8d27f09d1eb2b9f3b84900328fe5a0478e9b3bd91a845808",
|
||||
"zh:49848fa491ac26b0568b112a57d14cc49772607c7cf405e2f74dd537407214b1",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:7b9f345f5bb5f17c5d0bc3d373c25828934a3cbcdb331e0eab54eb47f1355fb2",
|
||||
"zh:8e276f4de508a86e725fffc02ee891db73397c35dbd591d8918af427eeec93a1",
|
||||
"zh:90b349933d2fd28f822a36128be4625bb816aa9f20ec314c79c77306f632ae87",
|
||||
"zh:a0ca6fd6cd94a52684e432104d3dc170a74075f47d9d4ba725cc340a438ed75a",
|
||||
"zh:a6cffc45535a0ff8206782538b3eeaef17dc93d0e1fd58bc1e6f7d5aa0f6ba1a",
|
||||
"zh:c010807b5d3e03d769419787b0e5d4efa6963134e1873a413102af6bf3dd1c49",
|
||||
"zh:faf962ee1981e897e99f7e528642c7e74beed37afd8eaf743e6ede24df812d80",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.6.2"
|
||||
hashes = [
|
||||
"h1:wmG0QFjQ2OfyPy6BB7mQ57WtoZZGGV07uAPQeDmIrAE=",
|
||||
"zh:0ef01a4f81147b32c1bea3429974d4d104bbc4be2ba3cfa667031a8183ef88ec",
|
||||
"zh:1bcd2d8161e89e39886119965ef0f37fcce2da9c1aca34263dd3002ba05fcb53",
|
||||
"zh:37c75d15e9514556a5f4ed02e1548aaa95c0ecd6ff9af1119ac905144c70c114",
|
||||
"zh:4210550a767226976bc7e57d988b9ce48f4411fa8a60cd74a6b246baf7589dad",
|
||||
"zh:562007382520cd4baa7320f35e1370ffe84e46ed4e2071fdc7e4b1a9b1f8ae9b",
|
||||
"zh:5efb9da90f665e43f22c2e13e0ce48e86cae2d960aaf1abf721b497f32025916",
|
||||
"zh:6f71257a6b1218d02a573fc9bff0657410404fb2ef23bc66ae8cd968f98d5ff6",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:9647e18f221380a85f2f0ab387c68fdafd58af6193a932417299cdcae4710150",
|
||||
"zh:bb6297ce412c3c2fa9fec726114e5e0508dd2638cad6a0cb433194930c97a544",
|
||||
"zh:f83e925ed73ff8a5ef6e3608ad9225baa5376446349572c2449c0c0b3cf184b7",
|
||||
"zh:fbef0781cb64de76b1df1ca11078aecba7800d82fd4a956302734999cfd9a4af",
|
||||
]
|
||||
}
|
@ -1,50 +0,0 @@
|
||||
resource "random_password" "oidc_client_id" {
|
||||
length = 72
|
||||
override_special = "-._~"
|
||||
}
|
||||
|
||||
resource "random_password" "oidc_secret" {
|
||||
length = 72
|
||||
override_special = "-._~"
|
||||
}
|
||||
|
||||
resource "nomad_variable" "authelia_oidc_secret" {
|
||||
path = "secrets/authelia/${var.name}"
|
||||
items = {
|
||||
client_id = resource.random_password.oidc_client_id.result
|
||||
secret = resource.random_password.oidc_secret.result
|
||||
secret_hash = resource.random_password.oidc_secret.bcrypt_hash
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_variable" "authelia_access_control_oidc" {
|
||||
path = "authelia/access_control/oidc_clients/${var.name}"
|
||||
items = {
|
||||
id = resource.random_password.oidc_client_id.result
|
||||
description = var.oidc_client_config.description
|
||||
authorization_policy = var.oidc_client_config.authorization_policy
|
||||
redirect_uris = yamlencode(var.oidc_client_config.redirect_uris)
|
||||
scopes = yamlencode(var.oidc_client_config.scopes)
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "oidc_authelia" {
|
||||
count = var.job_acl != null ? 1 : 0
|
||||
name = "${var.name}-authelia"
|
||||
description = "Give access to shared authelia variables"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/authelia/${var.name}" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
job_id = var.job_acl.job_id
|
||||
group = var.job_acl.group
|
||||
task = var.job_acl.task
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
output "client_id" {
|
||||
value = resource.random_password.oidc_client_id.result
|
||||
}
|
||||
|
||||
output "secret" {
|
||||
value = resource.random_password.oidc_secret.result
|
||||
}
|
||||
|
||||
output "secret_hash" {
|
||||
value = resource.random_password.oidc_secret.bcrypt_hash
|
||||
}
|
@ -1,25 +0,0 @@
|
||||
variable "name" {
|
||||
description = "Name of service"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "oidc_client_config" {
|
||||
description = "Authelia oidc client configuration to enable oidc authentication"
|
||||
type = object({
|
||||
description = string
|
||||
authorization_policy = optional(string, "one_factor")
|
||||
redirect_uris = list(string)
|
||||
scopes = list(string)
|
||||
})
|
||||
}
|
||||
|
||||
variable "job_acl" {
|
||||
description = "Job ACL that should be given to the secrets"
|
||||
type = object({
|
||||
job_id = string
|
||||
group = optional(string)
|
||||
task = optional(string)
|
||||
})
|
||||
|
||||
default = null
|
||||
}
|
@ -1,7 +0,0 @@
|
||||
resource "nomad_job" "prometheus" {
|
||||
jobspec = templatefile("${path.module}/prometheus.nomad", {
|
||||
use_wesher = var.use_wesher,
|
||||
})
|
||||
|
||||
detach = false
|
||||
}
|
@ -26,8 +26,14 @@ job "syslogng" {
task "promtail" {
driver = "docker"

meta = {
"diun.sort_tags" = "semver"
"diun.watch_repo" = true
"diun.include_tags" = "^[0-9]+\\.[0-9]+\\.[0-9]+$"
}

config {
image = "grafana/promtail:3.3.0"
image = "grafana/promtail:2.7.1"
ports = ["main", "metrics"]
args = ["--config.file=/etc/promtail/promtail.yml"]

@ -66,7 +72,7 @@ EOF

resources {
cpu = 50
memory = 50
memory = 20
}
}
}
@ -130,7 +136,7 @@ EOF

resources {
cpu = 50
memory = 50
memory = 10
}
}
}
core/traefik/.terraform.lock.hcl (generated)
@ -2,20 +2,20 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.1.0"
|
||||
version = "1.4.17"
|
||||
hashes = [
|
||||
"h1:ek0L7fA+4R1/BXhbutSRqlQPzSZ5aY/I2YfVehuYeEU=",
|
||||
"zh:39ba4d4fc9557d4d2c1e4bf866cf63973359b73e908cce237c54384512bdb454",
|
||||
"zh:40d2b66e3f3675e6b88000c145977c1d5288510c76b702c6c131d9168546c605",
|
||||
"zh:40fbe575d85a083f96d4703c6b7334e9fc3e08e4f1d441de2b9513215184ebcc",
|
||||
"zh:42ce6db79e2f94557fae516ee3f22e5271f0b556638eb45d5fbad02c99fc7af3",
|
||||
"zh:4acf63dfb92f879b3767529e75764fef68886521b7effa13dd0323c38133ce88",
|
||||
"zh:72cf35a13c2fb542cd3c8528826e2390db9b8f6f79ccb41532e009ad140a3269",
|
||||
"h1:iPylWr144mqXvM8NBVMTm+MS6JRhqIihlpJG91GYDyA=",
|
||||
"zh:146f97eacd9a0c78b357a6cfd2cb12765d4b18e9660a75500ee3e748c6eba41a",
|
||||
"zh:2eb89a6e5cee9aea03a96ea9f141096fe3baf219b2700ce30229d2d882f5015f",
|
||||
"zh:3d0f971f79b615c1014c75e2f99f34bd4b4da542ca9f31d5ea7fadc4e9de39c1",
|
||||
"zh:46099a750c752ce05aa14d663a86478a5ad66d95aff3d69367f1d3628aac7792",
|
||||
"zh:71e56006b013dcfe1e4e059b2b07148b44fcd79351ae2c357e0d97e27ae0d916",
|
||||
"zh:74febd25d776688f0558178c2f5a0e6818bbf4cdaa2e160d7049da04103940f0",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:8b8bcc136c05916234cb0c3bcc3d48fda7ca551a091ad8461ea4ab16fb6960a3",
|
||||
"zh:8e1c2f924eae88afe7ac83775f000ae8fd71a04e06228edf7eddce4df2421169",
|
||||
"zh:abc6e725531fc06a8e02e84946aaabc3453ecafbc1b7a442ea175db14fd9c86a",
|
||||
"zh:b735fcd1fb20971df3e92f81bb6d73eef845dcc9d3d98e908faa3f40013f0f69",
|
||||
"zh:ce59797282505d872903789db8f092861036da6ec3e73f6507dac725458a5ec9",
|
||||
"zh:af18c064a5f0dd5422d6771939274841f635b619ab392c73d5bf9720945fdb85",
|
||||
"zh:c133d7a862079da9f06e301c530eacbd70e9288fa2276ec0704df907270ee328",
|
||||
"zh:c894cf98d239b9f5a4b7cde9f5c836face0b5b93099048ee817b0380ea439c65",
|
||||
"zh:c918642870f0cafdbe4d7dd07c909701fc3ddb47cac8357bdcde1327bf78c11d",
|
||||
"zh:f8f5655099a57b4b9c0018a2d49133771e24c7ff8262efb1ceb140fd224aa9b6",
|
||||
]
|
||||
}
|
||||
|
@ -1,3 +1,9 @@
|
||||
variable "base_hostname" {
|
||||
type = string
|
||||
description = "Base hostname to serve content from"
|
||||
default = "dev.homelab"
|
||||
}
|
||||
|
||||
job "traefik" {
|
||||
datacenters = ["dc1"]
|
||||
type = "service"
|
||||
@ -14,15 +20,13 @@ job "traefik" {
|
||||
|
||||
update {
|
||||
max_parallel = 1
|
||||
canary = 1
|
||||
auto_promote = false
|
||||
# canary = 1
|
||||
# auto_promote = true
|
||||
auto_revert = true
|
||||
min_healthy_time = "30s"
|
||||
healthy_deadline = "5m"
|
||||
}
|
||||
|
||||
group "traefik" {
|
||||
count = 2
|
||||
count = 1
|
||||
|
||||
network {
|
||||
port "web" {
|
||||
@ -37,17 +41,12 @@ job "traefik" {
|
||||
static = 514
|
||||
}
|
||||
|
||||
port "gitssh" {
|
||||
static = 2222
|
||||
}
|
||||
|
||||
port "metrics" {}
|
||||
|
||||
dns {
|
||||
servers = [
|
||||
"192.168.2.101",
|
||||
"192.168.2.102",
|
||||
"192.168.2.30",
|
||||
"192.168.2.170",
|
||||
]
|
||||
}
|
||||
}
|
||||
@ -57,42 +56,39 @@ job "traefik" {
|
||||
sticky = true
|
||||
}
|
||||
|
||||
service {
|
||||
name = "traefik"
|
||||
provider = "nomad"
|
||||
port = "web"
|
||||
|
||||
check {
|
||||
type = "http"
|
||||
path = "/ping"
|
||||
port = "web"
|
||||
interval = "10s"
|
||||
timeout = "2s"
|
||||
}
|
||||
|
||||
tags = [
|
||||
"traefik.enable=true",
|
||||
"traefik.http.routers.traefik.entryPoints=websecure",
|
||||
"traefik.http.routers.traefik.service=api@internal",
|
||||
]
|
||||
}
|
||||
|
||||
task "traefik" {
|
||||
driver = "docker"
|
||||
|
||||
service {
|
||||
name = "traefik"
|
||||
provider = "nomad"
|
||||
port = "web"
|
||||
|
||||
check {
|
||||
type = "http"
|
||||
path = "/ping"
|
||||
interval = "10s"
|
||||
timeout = "2s"
|
||||
}
|
||||
|
||||
tags = [
|
||||
"traefik.enable=true",
|
||||
"traefik.http.routers.traefik.entryPoints=websecure",
|
||||
"traefik.http.routers.traefik.service=api@internal",
|
||||
]
|
||||
}
|
||||
|
||||
service {
|
||||
name = "traefik-metrics"
|
||||
provider = "nomad"
|
||||
port = "metrics"
|
||||
|
||||
tags = [
|
||||
"prometheus.scrape",
|
||||
]
|
||||
meta = {
|
||||
"diun.sort_tags" = "semver"
|
||||
"diun.watch_repo" = true
|
||||
"diun.include_tags" = "^[0-9]+\\.[0-9]+$"
|
||||
}
|
||||
|
||||
config {
|
||||
image = "traefik:3.0"
|
||||
image = "traefik:2.9"
|
||||
|
||||
ports = ["web", "websecure", "syslog", "gitssh", "metrics"]
|
||||
ports = ["web", "websecure"]
|
||||
network_mode = "host"
|
||||
|
||||
mount {
|
||||
@ -106,20 +102,6 @@ job "traefik" {
|
||||
target = "/etc/traefik/usersfile"
|
||||
source = "secrets/usersfile"
|
||||
}
|
||||
|
||||
mount {
|
||||
type = "bind"
|
||||
target = "/etc/traefik/certs"
|
||||
source = "secrets/certs"
|
||||
}
|
||||
}
|
||||
|
||||
env = {
|
||||
TRAEFIK_PROVIDERS_NOMAD_ENDPOINT_TOKEN = "${NOMAD_TOKEN}"
|
||||
}
|
||||
|
||||
identity {
|
||||
env = true
|
||||
}
|
||||
|
||||
template {
|
||||
@ -142,9 +124,12 @@ job "traefik" {
|
||||
[entryPoints.websecure]
|
||||
address = ":443"
|
||||
[entryPoints.websecure.http.tls]
|
||||
certResolver = "letsEncrypt"
|
||||
[[entryPoints.websecure.http.tls.domains]]
|
||||
main = "*.<< with nomadVar "nomad/jobs" >><< .base_hostname >><< end >>"
|
||||
|
||||
[entryPoints.metrics]
|
||||
address = ":<< env "NOMAD_PORT_metrics" >>"
|
||||
address = ":8989"
|
||||
|
||||
[entryPoints.syslogtcp]
|
||||
address = ":514"
|
||||
@ -152,9 +137,6 @@ job "traefik" {
|
||||
[entryPoints.syslogudp]
|
||||
address = ":514/udp"
|
||||
|
||||
[entryPoints.gitssh]
|
||||
address = ":2222"
|
||||
|
||||
[api]
|
||||
dashboard = true
|
||||
|
||||
@ -174,9 +156,31 @@ job "traefik" {
|
||||
exposedByDefault = false
|
||||
defaultRule = "Host(`{{normalize .Name}}.<< with nomadVar "nomad/jobs" >><< .base_hostname >><< end >>`)"
|
||||
[providers.nomad.endpoint]
|
||||
address = "unix:///secrets/api.sock"
|
||||
address = "http://<< env "attr.unique.network.ip-address" >>:4646"
|
||||
|
||||
<< if nomadVarExists "nomad/jobs/traefik" ->>
|
||||
[certificatesResolvers.letsEncrypt.acme]
|
||||
email = "<< with nomadVar "nomad/jobs/traefik" >><< .acme_email >><< end >>"
|
||||
# Store in /local because /secrets doesn't persist with ephemeral disk
|
||||
storage = "/local/acme.json"
|
||||
[certificatesResolvers.letsEncrypt.acme.dnsChallenge]
|
||||
provider = "cloudflare"
|
||||
resolvers = ["1.1.1.1:53", "8.8.8.8:53"]
|
||||
delayBeforeCheck = 0
|
||||
<<- end >>
|
||||
EOH
|
||||
destination = "${NOMAD_TASK_DIR}/config/traefik.toml"
|
||||
destination = "local/config/traefik.toml"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOH
|
||||
{{ with nomadVar "nomad/jobs/traefik" -}}
|
||||
CF_DNS_API_TOKEN={{ .domain_lego_dns }}
|
||||
CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
|
||||
{{- end }}
|
||||
EOH
|
||||
destination = "secrets/cloudflare.env"
|
||||
env = true
|
||||
}
|
||||
|
||||
template {
|
||||
@ -187,48 +191,23 @@ job "traefik" {
|
||||
entryPoints = ["websecure"]
|
||||
service = "nomad"
|
||||
rule = "Host(`nomad.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}`)"
|
||||
|
||||
{{ range nomadVarList "traefik_external" }}{{ with nomadVar .Path }}
|
||||
[http.routers.{{ .name }}]
|
||||
[http.routers.hass]
|
||||
entryPoints = ["websecure"]
|
||||
service = "{{ .name }}"
|
||||
rule = "Host(`{{ .subdomain }}.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}`){{ with .path_prefix.Value }}&&PathPrefix(`{{ . }}`){{ end }}"
|
||||
{{ $name := .name -}}
|
||||
{{ with .path_prefix.Value -}}
|
||||
middlewares = ["{{ $name }}@file"]
|
||||
{{ end }}
|
||||
{{- end }}{{ end }}
|
||||
|
||||
#[http.middlewares]
|
||||
# {{ range nomadVarList "traefik_external" }}{{ with nomadVar .Path -}}
|
||||
# {{ $name := .name -}}
|
||||
# {{ with .path_prefix.Value -}}
|
||||
# [http.middlewares.{{ $name }}.stripPrefix]
|
||||
# prefixes = ["{{ . }}"]
|
||||
# {{ end }}
|
||||
# {{- end }}{{ end }}
|
||||
service = "hass"
|
||||
rule = "Host(`hass.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}`)"
|
||||
|
||||
[http.services]
|
||||
[http.services.nomad]
|
||||
[http.services.nomad.loadBalancer]
|
||||
[[http.services.nomad.loadBalancer.servers]]
|
||||
url = "http://127.0.0.1:4646"
|
||||
|
||||
{{ range nomadVarList "traefik_external" }}{{ with nomadVar .Path }}
|
||||
[http.services.{{ .name }}]
|
||||
[http.services.{{ .name }}.loadBalancer]
|
||||
[[http.services.{{ .name }}.loadBalancer.servers]]
|
||||
url = "{{ .url }}"
|
||||
{{- end }}{{ end }}
|
||||
[http.services.hass]
|
||||
[http.services.hass.loadBalancer]
|
||||
[[http.services.hass.loadBalancer.servers]]
|
||||
url = "http://192.168.3.65:8123"
|
||||
EOH
|
||||
destination = "${NOMAD_TASK_DIR}/config/conf/route-hashi.toml"
|
||||
destination = "local/config/conf/route-hashi.toml"
|
||||
change_mode = "noop"
|
||||
splay = "1m"
|
||||
|
||||
wait {
|
||||
min = "10s"
|
||||
max = "20s"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
@ -264,39 +243,7 @@ job "traefik" {
|
||||
{{ end -}}
|
||||
{{- end }}
|
||||
EOH
|
||||
destination = "${NOMAD_TASK_DIR}/config/conf/route-syslog-ng.toml"
|
||||
change_mode = "noop"
|
||||
splay = "1m"
|
||||
|
||||
wait {
|
||||
min = "10s"
|
||||
max = "20s"
|
||||
}
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/certs/_lego/certificates/__thefij_rocks_crt" }}{{ .contents }}{{ end -}}"
|
||||
EOF
|
||||
destination = "${NOMAD_SECRETS_DIR}/certs/_.thefij.rocks.crt"
|
||||
change_mode = "noop"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/certs/_lego/certificates/__thefij_rocks_key" }}{{ .contents }}{{ end -}}"
|
||||
EOF
|
||||
destination = "${NOMAD_SECRETS_DIR}/certs/_.thefij.rocks.key"
|
||||
change_mode = "noop"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOH
|
||||
[[tls.certificates]]
|
||||
certFile = "/etc/traefik/certs/_.thefij.rocks.crt"
|
||||
keyFile = "/etc/traefik/certs/_.thefij.rocks.key"
|
||||
EOH
|
||||
destination = "${NOMAD_TASK_DIR}/config/conf/dynamic-tls.toml"
|
||||
destination = "local/config/conf/route-syslog-ng.toml"
|
||||
change_mode = "noop"
|
||||
}
|
||||
|
||||
@ -306,11 +253,12 @@ EOF
|
||||
{{ with nomadVar "nomad/jobs/traefik" }}
|
||||
{{ if .usersfile }}
|
||||
[http.middlewares.basic-auth.basicAuth]
|
||||
# TODO: Reference secrets mount
|
||||
usersFile = "/etc/traefik/usersfile"
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOH
|
||||
destination = "${NOMAD_TASK_DIR}/config/conf/middlewares.toml"
|
||||
destination = "local/config/conf/middlewares.toml"
|
||||
change_mode = "noop"
|
||||
}
|
||||
|
||||
@ -320,13 +268,14 @@ EOF
|
||||
{{ .usersfile }}
|
||||
{{- end }}
|
||||
EOH
|
||||
destination = "${NOMAD_SECRETS_DIR}/usersfile"
|
||||
destination = "secrets/usersfile"
|
||||
change_mode = "noop"
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 100
|
||||
memory = 150
|
||||
memory = 100
|
||||
memory_max = 500
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,90 +1,16 @@
|
||||
variable "base_hostname" {
|
||||
type = string
|
||||
description = "Base hostname to serve content from"
|
||||
default = "dev.homelab"
|
||||
}
|
||||
|
||||
resource "nomad_job" "traefik" {
|
||||
hcl2 {
|
||||
enabled = true
|
||||
vars = {
|
||||
"base_hostname" = var.base_hostname,
|
||||
}
|
||||
}
|
||||
|
||||
jobspec = file("${path.module}/traefik.nomad")
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "treafik_secrets_certs_read" {
|
||||
name = "traefik-secrets-certs-read"
|
||||
description = "Read certs to secrets store"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/certs/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
path "secrets/certs" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
job_acl {
|
||||
job_id = resource.nomad_job.traefik.id
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "traefik_query_jobs" {
|
||||
name = "traefik-query-jobs"
|
||||
description = "Allow traefik to query jobs"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
capabilities = ["list-jobs", "read-job"]
|
||||
}
|
||||
EOH
|
||||
job_acl {
|
||||
job_id = resource.nomad_job.traefik.id
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "treafik_external" {
|
||||
name = "traefik-exernal"
|
||||
description = "Read external services"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "traefik_external/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
job_acl {
|
||||
job_id = "traefik"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_variable" "traefik_external_hass" {
|
||||
path = "traefik_external/hass"
|
||||
items = {
|
||||
name = "hass"
|
||||
subdomain = "hass",
|
||||
url = "http://192.168.3.65:8123"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_variable" "traefik_external_plex" {
|
||||
path = "traefik_external/plex"
|
||||
items = {
|
||||
name = "plex"
|
||||
subdomain = "plex",
|
||||
url = "http://agnosticfront.thefij:32400"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_variable" "traefik_external_appdaemon" {
|
||||
path = "traefik_external/appdaemon"
|
||||
items = {
|
||||
name = "appdaemon"
|
||||
subdomain = "appdash",
|
||||
url = "http://192.168.3.65:5050"
|
||||
# path_prefix = "/add"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_variable" "traefik_external_jellyfin" {
|
||||
path = "traefik_external/jellyfin"
|
||||
items = {
|
||||
name = "jellyfin"
|
||||
subdomain = "jellyfin",
|
||||
url = "http://agnosticfront.thefij:8096"
|
||||
}
|
||||
}
|
||||
|
@ -3,9 +3,3 @@ variable "base_hostname" {
|
||||
description = "Base hostname to serve content from"
|
||||
default = "dev.homelab"
|
||||
}
|
||||
|
||||
variable "use_wesher" {
|
||||
type = bool
|
||||
description = "Indicates whether or not services should expose themselves on the wesher network"
|
||||
default = true
|
||||
}
|
||||
|
databases/.terraform.lock.hcl (generated)
@ -1,40 +1,40 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.0.0"
|
||||
provider "registry.terraform.io/hashicorp/consul" {
|
||||
version = "2.15.1"
|
||||
hashes = [
|
||||
"h1:lIHIxA6ZmfyTGL3J9YIddhxlfit4ipSS09BLxkwo6L0=",
|
||||
"zh:09b897d64db293f9a904a4a0849b11ec1e3fff5c638f734d82ae36d8dc044b72",
|
||||
"zh:435cc106799290f64078ec24b6c59cb32b33784d609088638ed32c6d12121199",
|
||||
"zh:7073444bd064e8c4ec115ca7d9d7f030cc56795c0a83c27f6668bba519e6849a",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:79d238c35d650d2d83a439716182da63f3b2767e72e4cbd0b69cb13d9b1aebfc",
|
||||
"zh:7ef5f49344278fe0bbc5447424e6aa5425ff1821d010d944a444d7fa2c751acf",
|
||||
"zh:92179091638c8ba03feef371c4361a790190f9955caea1fa59de2055c701a251",
|
||||
"zh:a8a34398851761368eb8e7c171f24e55efa6e9fdbb5c455f6dec34dc17f631bc",
|
||||
"zh:b38fd5338625ebace5a4a94cea1a28b11bd91995d834e318f47587cfaf6ec599",
|
||||
"zh:b71b273a2aca7ad5f1e07c767b25b5a888881ba9ca93b30044ccc39c2937f03c",
|
||||
"zh:cd14357e520e0f09fb25badfb4f2ee37d7741afdc3ed47c7bcf54c1683772543",
|
||||
"zh:e05e025f4bb95138c3c8a75c636e97cd7cfd2fc1525b0c8bd097db8c5f02df6e",
|
||||
"h1:PexyQBRLDA+SR+sWlzYBZswry5O5h/tTfj87CaECtLc=",
|
||||
"zh:1806830a3cf103e65e772a7d28fd4df2788c29a029fb2def1326bc777ad107ed",
|
||||
"zh:252be544fb4c9daf09cad7d3776daf5fa66b62740d3ea9d6d499a7b1697c3433",
|
||||
"zh:50985fe02a8e5ae47c75d7c28c911b25d7dc4716cff2ed55ca05889ab77a1f73",
|
||||
"zh:54cf0ec90538703c66937c77e8d72a38d5af47437eb0b8b55eb5836c5d288878",
|
||||
"zh:704f536c621337e06fffef6d5f49ac81f52d249f937250527c12884cb83aefed",
|
||||
"zh:896d8ef6d0b555299f124eb25bce8a17d735da14ef21f07582098d301f47da30",
|
||||
"zh:976277a85b0a0baafe267cc494f766448d1da5b6936ddcb3ce393bd4d22f08d2",
|
||||
"zh:c7faa9a2b11bc45833a3e8e340f22f1ecf01597eaeffa7669234b4549d7dfa85",
|
||||
"zh:caf851ef9c8ce482864badf7058f9278d4537112fa236efd8f1a9315801d9061",
|
||||
"zh:db203435d58b0ac842540861b3307a623423275d85754c171773f3b210ae5b24",
|
||||
"zh:f3d3efac504c9484a025beb919d22b290aa6dbff256f6e86c1f8ce7817e077e5",
|
||||
"zh:f710a37190429045d109edd35de69db3b5f619919c2fa04c77a3a639fea9fd7d",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.5.1"
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "1.4.17"
|
||||
hashes = [
|
||||
"h1:VSnd9ZIPyfKHOObuQCaKfnjIHRtR7qTw19Rz8tJxm+k=",
|
||||
"zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64",
|
||||
"zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d",
|
||||
"zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831",
|
||||
"zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3",
|
||||
"zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f",
|
||||
"h1:iPylWr144mqXvM8NBVMTm+MS6JRhqIihlpJG91GYDyA=",
|
||||
"zh:146f97eacd9a0c78b357a6cfd2cb12765d4b18e9660a75500ee3e748c6eba41a",
|
||||
"zh:2eb89a6e5cee9aea03a96ea9f141096fe3baf219b2700ce30229d2d882f5015f",
|
||||
"zh:3d0f971f79b615c1014c75e2f99f34bd4b4da542ca9f31d5ea7fadc4e9de39c1",
|
||||
"zh:46099a750c752ce05aa14d663a86478a5ad66d95aff3d69367f1d3628aac7792",
|
||||
"zh:71e56006b013dcfe1e4e059b2b07148b44fcd79351ae2c357e0d97e27ae0d916",
|
||||
"zh:74febd25d776688f0558178c2f5a0e6818bbf4cdaa2e160d7049da04103940f0",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b",
|
||||
"zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2",
|
||||
"zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865",
|
||||
"zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03",
|
||||
"zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602",
|
||||
"zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014",
|
||||
"zh:af18c064a5f0dd5422d6771939274841f635b619ab392c73d5bf9720945fdb85",
|
||||
"zh:c133d7a862079da9f06e301c530eacbd70e9288fa2276ec0704df907270ee328",
|
||||
"zh:c894cf98d239b9f5a4b7cde9f5c836face0b5b93099048ee817b0380ea439c65",
|
||||
"zh:c918642870f0cafdbe4d7dd07c909701fc3ddb47cac8357bdcde1327bf78c11d",
|
||||
"zh:f8f5655099a57b4b9c0018a2d49133771e24c7ff8262efb1ceb140fd224aa9b6",
|
||||
]
|
||||
}
|
||||
|
@ -1,123 +0,0 @@
|
||||
resource "nomad_job" "lldap" {
|
||||
jobspec = templatefile("${path.module}/lldap.nomad", {
|
||||
use_wesher = var.use_wesher,
|
||||
})
|
||||
|
||||
depends_on = [resource.nomad_job.mysql-server]
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
||||
|
||||
# Give access to ldap secrets
|
||||
resource "nomad_acl_policy" "lldap_ldap_secrets" {
|
||||
name = "lldap-secrets-ldap"
|
||||
description = "Give access to LDAP secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/ldap/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
path "secrets/ldap" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
# job_id = resource.nomad_job.lldap.id
|
||||
job_id = "lldap"
|
||||
}
|
||||
}
|
||||
|
||||
# Create self-scoped psk so that config is valid at first start
|
||||
resource "random_password" "lldap_ldap_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "lldap_ldap_psk" {
|
||||
path = "secrets/ldap/allowed_psks/ldap"
|
||||
items = {
|
||||
psk = "lldap:${resource.random_password.lldap_ldap_psk.result}"
|
||||
}
|
||||
}
|
||||
|
||||
# Give access to smtp secrets
|
||||
resource "nomad_acl_policy" "lldap_smtp_secrets" {
|
||||
name = "lldap-secrets-smtp"
|
||||
description = "Give access to SMTP secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/smtp" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
# job_id = resource.nomad_job.lldap.id
|
||||
job_id = "lldap"
|
||||
group = "lldap"
|
||||
task = "lldap"
|
||||
}
|
||||
}
|
||||
|
||||
# Generate secrets and policies for access to MySQL
|
||||
resource "nomad_acl_policy" "lldap_mysql_bootstrap_secrets" {
|
||||
name = "lldap-secrets-mysql"
|
||||
description = "Give access to MySQL secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
# job_id = resource.nomad_job.lldap.id
|
||||
job_id = "lldap"
|
||||
group = "lldap"
|
||||
task = "bootstrap"
|
||||
}
|
||||
}
|
||||
|
||||
resource "random_password" "lldap_mysql_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "lldap_mysql_psk" {
|
||||
path = "secrets/mysql/allowed_psks/lldap"
|
||||
items = {
|
||||
psk = "lldap:${resource.random_password.lldap_mysql_psk.result}"
|
||||
}
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "lldap_mysql_psk" {
|
||||
name = "lldap-secrets-mysql-psk"
|
||||
description = "Give access to MySQL PSK secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql/allowed_psks/lldap" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
# job_id = resource.nomad_job.lldap.id
|
||||
job_id = "lldap"
|
||||
group = "lldap"
|
||||
task = "stunnel"
|
||||
}
|
||||
}
|
databases/main.tf (new file)
@ -0,0 +1,46 @@
|
||||
resource "nomad_job" "mysql-server" {
|
||||
hcl2 {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
jobspec = file("${path.module}/mysql.nomad")
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
||||
|
||||
resource "nomad_job" "postgres-server" {
|
||||
hcl2 {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
jobspec = file("${path.module}/postgres.nomad")
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
||||
|
||||
resource "nomad_job" "redis" {
|
||||
for_each = toset(["blocky", "authelia"])
|
||||
|
||||
hcl2 {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
jobspec = templatefile("${path.module}/redis.nomad",
|
||||
{
|
||||
name = each.key,
|
||||
}
|
||||
)
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
||||
|
||||
resource "nomad_job" "rediscommander" {
|
||||
hcl2 {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
jobspec = file("${path.module}/rediscommander.nomad")
|
||||
}
|
@ -3,10 +3,6 @@ job "mysql-server" {
|
||||
type = "service"
|
||||
priority = 80
|
||||
|
||||
update {
|
||||
auto_revert = true
|
||||
}
|
||||
|
||||
group "mysql-server" {
|
||||
count = 1
|
||||
|
||||
@ -77,7 +73,7 @@ MYSQL_ROOT_PASSWORD={{ .mysql_root_password }}
|
||||
|
||||
resources {
|
||||
cpu = 300
|
||||
memory = 1600
|
||||
memory = 1536
|
||||
}
|
||||
}
|
||||
|
||||
@ -85,9 +81,9 @@ MYSQL_ROOT_PASSWORD={{ .mysql_root_password }}
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
image = "alpine:3.17"
|
||||
ports = ["tls"]
|
||||
args = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
@ -95,6 +91,15 @@ MYSQL_ROOT_PASSWORD={{ .mysql_root_password }}
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel ${NOMAD_TASK_DIR}/stunnel.conf
|
||||
EOF
|
||||
destination = "${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
@ -112,9 +117,9 @@ PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ range nomadVarList "secrets/mysql/allowed_psks" -}}
|
||||
{{ with nomadVar .Path }}{{ .psk }}{{ end }}
|
||||
{{ end -}}
|
||||
{{ with nomadVar "nomad/jobs/mysql-server" -}}
|
||||
{{ .allowed_psks }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
|
||||
}
|
||||
|
@ -1,41 +0,0 @@
|
||||
resource "nomad_job" "mysql-server" {
|
||||
jobspec = file("${path.module}/mysql.nomad")
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "secrets_mysql" {
|
||||
name = "secrets-mysql"
|
||||
description = "Give access to MySQL secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/mysql" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
path "secrets/mysql/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
# job_id = resource.nomad_job.mysql-server.id
|
||||
job_id = "mysql-server"
|
||||
}
|
||||
}
|
||||
|
||||
# Create self-scoped psk so that config is valid at first start
|
||||
resource "random_password" "mysql_mysql_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "mysql_mysql_psk" {
|
||||
path = "secrets/mysql/allowed_psks/mysql"
|
||||
items = {
|
||||
psk = "mysql:${resource.random_password.mysql_mysql_psk.result}"
|
||||
}
|
||||
}
|
@ -3,10 +3,6 @@ job "postgres-server" {
|
||||
type = "service"
|
||||
priority = 80
|
||||
|
||||
update {
|
||||
auto_revert = true
|
||||
}
|
||||
|
||||
group "postgres-server" {
|
||||
count = 1
|
||||
|
||||
@ -21,16 +17,15 @@ job "postgres-server" {
|
||||
mode = "bridge"
|
||||
|
||||
port "db" {
|
||||
static = 5432
|
||||
to = 5432
|
||||
host_network = "wesher"
|
||||
}
|
||||
|
||||
port "tls" {}
|
||||
}
|
||||
|
||||
volume "postgres-data" {
|
||||
type = "host"
|
||||
read_only = false
|
||||
source = "postgres-data"
|
||||
source = "mysql-data"
|
||||
}
|
||||
|
||||
service {
|
||||
@ -39,12 +34,6 @@ job "postgres-server" {
|
||||
port = "db"
|
||||
}
|
||||
|
||||
service {
|
||||
name = "postgres-tls"
|
||||
provider = "nomad"
|
||||
port = "tls"
|
||||
}
|
||||
|
||||
task "postgres-server" {
|
||||
driver = "docker"
|
||||
|
||||
@ -55,7 +44,7 @@ job "postgres-server" {
|
||||
|
||||
volume_mount {
|
||||
volume = "postgres-data"
|
||||
destination = "/var/lib/postgresql/data"
|
||||
destination = "/var/lib/postgresql"
|
||||
read_only = false
|
||||
}
|
||||
|
||||
@ -76,48 +65,8 @@ POSTGRES_PASSWORD={{ .superuser_pass }}
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 500
|
||||
memory = 800
|
||||
memory_max = 1500
|
||||
}
|
||||
}
|
||||
|
||||
task "stunnel" {
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
ports = ["tls"]
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 100
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
foreground = yes
|
||||
delay = yes
|
||||
|
||||
[postgres_server]
|
||||
accept = {{ env "NOMAD_PORT_tls" }}
|
||||
connect = 127.0.0.1:5432
|
||||
ciphers = PSK
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||
EOF
|
||||
destination = "${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ range nomadVarList "secrets/postgres/allowed_psks" -}}
|
||||
{{ with nomadVar .Path }}{{ .psk }}{{ end }}
|
||||
{{ end -}}
|
||||
EOF
|
||||
destination = "${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
|
||||
cpu = 300
|
||||
memory = 256
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,41 +0,0 @@
|
||||
resource "nomad_job" "postgres-server" {
|
||||
jobspec = file("${path.module}/postgres.nomad")
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
||||
|
||||
resource "nomad_acl_policy" "secrets_postgres" {
|
||||
name = "secrets-postgres"
|
||||
description = "Give access to Postgres secrets"
|
||||
rules_hcl = <<EOH
|
||||
namespace "default" {
|
||||
variables {
|
||||
path "secrets/postgres" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
path "secrets/postgres/*" {
|
||||
capabilities = ["read"]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOH
|
||||
|
||||
job_acl {
|
||||
# job_id = resource.nomad_job.postgres-server.id
|
||||
job_id = "postgres-server"
|
||||
}
|
||||
}
|
||||
|
||||
# Create self-scoped psk so that config is valid at first start
|
||||
resource "random_password" "postgres_postgres_psk" {
|
||||
length = 32
|
||||
override_special = "!@#%&*-_="
|
||||
}
|
||||
|
||||
resource "nomad_variable" "postgres_postgres_psk" {
|
||||
path = "secrets/postgres/allowed_psks/postgres"
|
||||
items = {
|
||||
psk = "postgres:${resource.random_password.postgres_postgres_psk.result}"
|
||||
}
|
||||
}
|
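The random_password plus nomad_variable pair above is the per-client pattern used throughout this change: each client gets its own entry under secrets/postgres/allowed_psks/ (or secrets/mysql/allowed_psks/), and the server-side stunnel template aggregates them with nomadVarList. A hypothetical example of onboarding another client (the "grafana" name is illustrative, not part of this diff):
resource "random_password" "grafana_postgres_psk" {
  length           = 32
  override_special = "!@#%&*-_="
}

resource "nomad_variable" "grafana_postgres_psk" {
  path = "secrets/postgres/allowed_psks/grafana"
  items = {
    # stunnel PSK files expect identity:key pairs, so the identity is prefixed here
    psk = "grafana:${resource.random_password.grafana_postgres_psk.result}"
  }
}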
@ -3,10 +3,6 @@ job "redis-${name}" {
|
||||
type = "service"
|
||||
priority = 80
|
||||
|
||||
update {
|
||||
auto_revert = true
|
||||
}
|
||||
|
||||
group "cache" {
|
||||
count = 1
|
||||
|
||||
@ -39,7 +35,7 @@ job "redis-${name}" {
|
||||
|
||||
resources {
|
||||
cpu = 100
|
||||
memory = 64
|
||||
memory = 128
|
||||
memory_max = 512
|
||||
}
|
||||
}
|
||||
@ -48,14 +44,23 @@ job "redis-${name}" {
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
image = "alpine:3.17"
|
||||
ports = ["tls"]
|
||||
args = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 50
|
||||
memory = 15
|
||||
cpu = 100
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel $${NOMAD_TASK_DIR}/stunnel.conf
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
|
@ -1,12 +0,0 @@
|
||||
resource "nomad_job" "redis" {
|
||||
for_each = toset(["blocky", "authelia"])
|
||||
|
||||
jobspec = templatefile("${path.module}/redis.nomad",
|
||||
{
|
||||
name = each.key,
|
||||
}
|
||||
)
|
||||
|
||||
# Block until deployed as there are services dependent on this one
|
||||
detach = false
|
||||
}
|
databases/rediscommander.nomad (Normal file, 99 lines)
@ -0,0 +1,99 @@
|
||||
job "rediscommander" {
|
||||
datacenters = ["dc1"]
|
||||
type = "service"
|
||||
|
||||
group "rediscommander" {
|
||||
count = 1
|
||||
|
||||
network {
|
||||
mode = "bridge"
|
||||
|
||||
port "main" {
|
||||
host_network = "wesher"
|
||||
to = 8081
|
||||
}
|
||||
}
|
||||
|
||||
service {
|
||||
name = "rediscommander"
|
||||
provider = "nomad"
|
||||
port = "main"
|
||||
|
||||
tags = [
|
||||
"traefik.enable=true",
|
||||
"traefik.http.routers.rediscommander.entryPoints=websecure",
|
||||
]
|
||||
}
|
||||
|
||||
task "rediscommander" {
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "rediscommander/redis-commander:latest"
|
||||
ports = ["main"]
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOH
|
||||
REDIS_HOSTS=stunnel:127.0.0.1:6379
|
||||
EOH
|
||||
env = true
|
||||
destination = "env"
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 50
|
||||
memory = 50
|
||||
}
|
||||
}
|
||||
|
||||
task "redis-stunnel" {
|
||||
driver = "docker"
|
||||
|
||||
config {
|
||||
image = "alpine:3.17"
|
||||
args = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 100
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
||||
EOF
|
||||
destination = "${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
foreground = yes
|
||||
delay = yes
|
||||
|
||||
[redis_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:6379
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-tls" -}}
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
{{- end }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||
EOF
|
||||
destination = "${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{ with nomadVar "nomad/jobs/rediscommander" -}}
|
||||
{{ .redis_stunnel_psk }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
variable "use_wesher" {
|
||||
type = bool
|
||||
description = "Indicates whether or not services should expose themselves on the wesher network"
|
||||
default = true
|
||||
}
|
main.tf (34 lines)
@ -1,34 +0,0 @@
|
||||
module "databases" {
|
||||
source = "./databases"
|
||||
|
||||
use_wesher = var.use_wesher
|
||||
}
|
||||
|
||||
module "core" {
|
||||
source = "./core"
|
||||
|
||||
base_hostname = var.base_hostname
|
||||
use_wesher = var.use_wesher
|
||||
|
||||
# Metrics and Blocky depend on databases
|
||||
depends_on = [module.databases]
|
||||
}
|
||||
|
||||
module "services" {
|
||||
source = "./services"
|
||||
|
||||
base_hostname = var.base_hostname
|
||||
use_wesher = var.use_wesher
|
||||
|
||||
# NOTE: It may be possible to flip this and core so core templates don't
|
||||
# need to be rerendered every time a service goes up or down.
|
||||
depends_on = [module.databases, module.core]
|
||||
}
|
||||
|
||||
module "backups" {
|
||||
source = "./backups"
|
||||
|
||||
use_wesher = var.use_wesher
|
||||
|
||||
depends_on = [module.databases, module.services, module.core]
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
pre-commit
|
||||
detect-secrets==1.4.0 # This should match what is in .pre-commit-config.yaml
|
||||
ansible
|
||||
python-nomad
|
||||
netaddr
|
||||
python-consul
|
||||
hvac
|
||||
|
@ -1,128 +0,0 @@
|
||||
#! /usr/bin/env python3
|
||||
from os import environ
|
||||
from time import sleep
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
from argparse import ArgumentParser
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
NOMAD_ADDR = environ.get("NOMAD_ADDR", "http://127.0.0.1:4646")
|
||||
NOMAD_TOKEN = environ.get("NOMAD_TOKEN")
|
||||
|
||||
|
||||
def nomad_req(
|
||||
*path: str,
|
||||
params: dict[str, Any] | None = None,
|
||||
data: dict[str, Any] | None = None,
|
||||
method="GET",
|
||||
) -> list[dict[str, Any]] | dict[str, Any] | str:
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
if NOMAD_TOKEN:
|
||||
headers["X-Nomad-Token"] = NOMAD_TOKEN
|
||||
|
||||
response = requests.request(
|
||||
method,
|
||||
f"{NOMAD_ADDR}/v1/{'/'.join(path)}",
|
||||
params=params,
|
||||
json=data,
|
||||
headers=headers,
|
||||
)
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.RequestException as ex:
|
||||
print(response.text)
|
||||
raise ex
|
||||
|
||||
try:
|
||||
return response.json()
|
||||
except requests.exceptions.JSONDecodeError:
|
||||
return response.text
|
||||
|
||||
|
||||
def wait_for_job_alloc_status(job_id: str, status: str):
|
||||
allocs = nomad_req("job", job_id, "allocations")
|
||||
allocs = cast(list[dict[str, Any]], allocs)
|
||||
|
||||
while not all(alloc["ClientStatus"] == status for alloc in allocs):
|
||||
print(f"Waiting for all allocs to reach {status}...")
|
||||
sleep(5)
|
||||
allocs = nomad_req("job", job_id, "allocations")
|
||||
allocs = cast(list[dict[str, Any]], allocs)
|
||||
|
||||
|
||||
def wait_for_eval_status(eval_id: str, status: str):
|
||||
eval = nomad_req("evaluation", eval_id)
|
||||
eval = cast(dict[str, Any], eval)
|
||||
|
||||
while eval["Status"] != status:
|
||||
print(f"Waiting for eval to reach {status}...")
|
||||
sleep(5)
|
||||
eval = nomad_req("evaluation", eval_id)
|
||||
eval = cast(dict[str, Any], eval)
|
||||
|
||||
|
||||
parser = ArgumentParser(
|
||||
description="Execute one-off backups and restores of services",
|
||||
)
|
||||
parser.add_argument("service_name", help="Name of the service to backup or restore")
|
||||
parser.add_argument("-a", "--action", default="backup", choices=("backup", "restore"), help="Action to take, backup or restore")
|
||||
parser.add_argument("-s", "--snapshot", default="latest", help="Backup snapshot to restore, if restore is the chosen action")
|
||||
parser.add_argument("-x", "--extra-safe", action="store_true", help="Perform extra safe backup or restore by stoping target job first")
|
||||
args = parser.parse_args()
|
||||
|
||||
service_name = args.service_name
|
||||
service_info = nomad_req("service", service_name, params={"choose": "1|backups"})
|
||||
|
||||
if not service_info:
|
||||
print(f"Could not find service {service_name}")
|
||||
exit(1)
|
||||
|
||||
service_info = cast(list[dict[str, Any]], service_info)
|
||||
node_id = service_info[0]["NodeID"]
|
||||
job_id = service_info[0]["JobID"]
|
||||
|
||||
node = nomad_req("node", node_id)
|
||||
node = cast(dict[str, Any], node)
|
||||
node_name = node["Name"]
|
||||
backup_job_name = f"backup-oneoff-{node_name}"
|
||||
|
||||
backup_job = nomad_req("job", backup_job_name)
|
||||
if not backup_job:
|
||||
print(f"Could not find backup job {backup_job_name} for {service_name}")
|
||||
|
||||
if args.extra_safe:
|
||||
print("Stopping job allocs")
|
||||
stop_job = nomad_req("job", job_id, method="DELETE")
|
||||
print(stop_job)
|
||||
wait_for_job_alloc_status(job_id, "complete")
|
||||
|
||||
backup_job = cast(dict[str, Any], backup_job)
|
||||
backup_job_id = backup_job["ID"]
|
||||
|
||||
dispatch = nomad_req(
|
||||
"job",
|
||||
backup_job_id,
|
||||
"dispatch",
|
||||
data={
|
||||
"Payload": None,
|
||||
"Meta": {
|
||||
"job_name": service_name,
|
||||
"task": args.action,
|
||||
"snapshot": args.snapshot,
|
||||
},
|
||||
},
|
||||
method="POST",
|
||||
)
|
||||
dispatch = cast(dict[str, Any], dispatch)
|
||||
print(dispatch)
|
||||
|
||||
if args.extra_safe:
|
||||
print(f"Wait for {args.action} to finish")
|
||||
wait_for_eval_status(dispatch["EvalID"], "complete")
|
||||
|
||||
print("Backup complete. Verify success and restart job")
|
||||
# If auto restarting, get versions and "revert" to version n-1 since n will be the recently stopped version
|
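The script above dispatches a parameterized batch job and passes job_name, task, and snapshot as dispatch metadata. The matching jobspec is not shown in this section, but a dispatchable job of that shape would declare the metadata roughly as follows (a sketch inferred from the NOMAD_META_* references in backup.nomad; not the literal job definition):
job "backup-oneoff-n1" {
  type = "batch"

  parameterized {
    # The script passes everything through Meta rather than a payload
    payload       = "forbidden"
    meta_required = ["job_name", "task", "snapshot"]
  }

  group "backup" {
    task "backup" {
      driver = "docker"
      config {
        image = "iamthefij/resticscheduler:0.1.1"
        # Dispatch metadata arrives as NOMAD_META_* environment variables,
        # mirroring the $${NOMAD_META_task} / $${NOMAD_META_job_name} args in backup.nomad
        args = ["-once", "-${NOMAD_META_task}", "${NOMAD_META_job_name}", "--snapshot", "${NOMAD_META_snapshot}", "/local/node-jobs.hcl"]
      }
    }
  }
}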
@ -1,99 +0,0 @@
|
||||
#! /usr/bin/env python3
|
||||
from argparse import ArgumentParser
|
||||
from os import environ
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
NOMAD_ADDR = environ.get("NOMAD_ADDR", "http://127.0.0.1:4646")
|
||||
NOMAD_TOKEN = environ.get("NOMAD_TOKEN")
|
||||
|
||||
|
||||
def nomad_req(
|
||||
*path: str, params: dict[str, Any] | None = None, method="GET"
|
||||
) -> list[dict[str, Any]] | dict[str, Any] | str:
|
||||
headers = {}
|
||||
if NOMAD_TOKEN:
|
||||
headers["X-Nomad-Token"] = NOMAD_TOKEN
|
||||
|
||||
response = requests.request(
|
||||
method,
|
||||
f"{NOMAD_ADDR}/v1/{'/'.join(path)}",
|
||||
params=params,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
try:
|
||||
return response.json()
|
||||
except requests.exceptions.JSONDecodeError:
|
||||
return response.text
|
||||
|
||||
|
||||
def extract_job_services(job: dict[str, Any]) -> dict[str, str]:
|
||||
services: dict[str, str] = dict()
|
||||
for group in job["TaskGroups"]:
|
||||
for service in group.get("Services") or []:
|
||||
services[service["Name"]] = group["Name"]
|
||||
for task in group["Tasks"]:
|
||||
for service in task.get("Services") or []:
|
||||
services[service["Name"]] = group["Name"]
|
||||
|
||||
return services
|
||||
|
||||
exit_code = 0
|
||||
parser = ArgumentParser(
|
||||
description="Checks for missing services and optionally restarts their allocs.",
|
||||
)
|
||||
parser.add_argument("-r", "--restart", action="store_true", help="Restart allocs for missing services")
|
||||
args = parser.parse_args()
|
||||
|
||||
for job in nomad_req("jobs"):
|
||||
job = cast(dict[str, Any], job)
|
||||
|
||||
if job["Type"] in ("batch", "sysbatch"):
|
||||
continue
|
||||
|
||||
if job["Status"] != "running":
|
||||
print(f"WARNING: job {job['Name']} is {job['Status']}")
|
||||
continue
|
||||
|
||||
job_detail = nomad_req("job", job["ID"])
|
||||
job_detail = cast(dict[str, Any], job_detail)
|
||||
|
||||
expected_services = extract_job_services(job_detail)
|
||||
|
||||
found_services: set[str] = set()
|
||||
for service in nomad_req("job", job_detail["ID"], "services"):
|
||||
service = cast(dict[str, Any], service)
|
||||
found_services.add(service["ServiceName"])
|
||||
|
||||
missing_services = set(expected_services) - found_services
|
||||
restart_groups: set[str] = set()
|
||||
for missing_service in missing_services:
|
||||
print(f"ERROR: Missing service {missing_service} for job {job_detail['Name']}")
|
||||
# print(job)
|
||||
exit_code = 1
|
||||
|
||||
# Add group associated with missing service to set
|
||||
restart_groups.add(expected_services[missing_service])
|
||||
|
||||
if not restart_groups or not args.restart:
|
||||
continue
|
||||
|
||||
# Get allocs for groups that are missing services
|
||||
restart_allocs: set[str] = set()
|
||||
for allocation in nomad_req("job", job_detail["ID"], "allocations"):
|
||||
allocation = cast(dict[str, Any], allocation)
|
||||
if allocation["ClientStatus"] == "running" and allocation["TaskGroup"] in restart_groups:
|
||||
restart_allocs.add(allocation["ID"])
|
||||
|
||||
# Restart allocs associated with missing services
|
||||
for allocation in restart_allocs:
|
||||
print(f"INFO: Restarting allocation {allocation}")
|
||||
nomad_req("client", "allocation", allocation, "restart")
|
||||
|
||||
|
||||
exit(exit_code)
|
@ -1,72 +0,0 @@
|
||||
#! /usr/bin/env python3
|
||||
from argparse import ArgumentParser
|
||||
from os import environ
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
NOMAD_ADDR = environ.get("NOMAD_ADDR", "http://127.0.0.1:4646")
|
||||
NOMAD_TOKEN = environ.get("NOMAD_TOKEN")
|
||||
|
||||
|
||||
def nomad_req(
|
||||
*path: str, params: dict[str, Any] | None = None, method="GET"
|
||||
) -> list[dict[str, Any]] | dict[str, Any] | str:
|
||||
headers = {}
|
||||
if NOMAD_TOKEN:
|
||||
headers["X-Nomad-Token"] = NOMAD_TOKEN
|
||||
|
||||
response = requests.request(
|
||||
method,
|
||||
f"{NOMAD_ADDR}/v1/{'/'.join(path)}",
|
||||
params=params,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
try:
|
||||
return response.json()
|
||||
except requests.exceptions.JSONDecodeError:
|
||||
return response.text
|
||||
|
||||
|
||||
exit_code = 0
|
||||
parser = ArgumentParser(
|
||||
description="Checks for orphaned services and optionally deletes them.",
|
||||
)
|
||||
parser.add_argument("-d", "--delete", action="store_true", help="Delete orphan services")
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
for namespace in nomad_req("services"):
|
||||
namespace = cast(dict[str, Any], namespace)
|
||||
for service in namespace["Services"]:
|
||||
service_name = service["ServiceName"]
|
||||
for service_instance in nomad_req("service", service_name):
|
||||
service_instance = cast(dict[str, Any], service_instance)
|
||||
service_id = service_instance["ID"]
|
||||
alloc_id = service_instance["AllocID"]
|
||||
|
||||
alloc_found = True
|
||||
|
||||
try:
|
||||
alloc = nomad_req("allocation", alloc_id)
|
||||
continue
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.response.status_code == 404:
|
||||
alloc_found = False
|
||||
message = f"alloc {alloc_id} not found for {service_name}."
|
||||
if args.delete:
|
||||
message += f" Deleting {service_id}"
|
||||
|
||||
print(message)
|
||||
else:
|
||||
raise e
|
||||
|
||||
if not alloc_found and args.delete:
|
||||
nomad_req("service", service_name, service_id, method="DELETE")
|
||||
|
||||
|
||||
exit(exit_code)
|
services.tf (Normal file, 5 lines)
@ -0,0 +1,5 @@
|
||||
module "services" {
|
||||
source = "./services"
|
||||
|
||||
depends_on = [module.databases, module.core]
|
||||
}
|
services/.terraform.lock.hcl (generated, 45 lines)
@ -2,39 +2,20 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "2.1.1"
|
||||
version = "1.4.19"
|
||||
hashes = [
|
||||
"h1:liQBgBXfQEYmwpoGZUfSsu0U0t/nhvuRZbMhaMz7wcQ=",
|
||||
"zh:28bc6922e8a21334568410760150d9d413d7b190d60b5f0b4aab2f4ef52efeeb",
|
||||
"zh:2d4283740e92ce1403875486cd5ff2c8acf9df28c190873ab4d769ce37db10c1",
|
||||
"zh:457e16d70075eae714a7df249d3ba42c2176f19b6750650152c56f33959028d9",
|
||||
"zh:49ee88371e355c00971eefee6b5001392431b47b3e760a5c649dda76f59fb8fa",
|
||||
"zh:614ad3bf07155ed8a5ced41dafb09042afbd1868490a379654b3e970def8e33d",
|
||||
"zh:75be7199d76987e7549e1f27439922973d1bf27353b44a593bfbbc2e3b9f698f",
|
||||
"h1:EdBny2gaLr/IE+l+6csyCKeIGFMYZ/4tHKpcbS7ArgE=",
|
||||
"zh:2f3ceeb3318a6304026035b0ac9ee3e52df04913bb9ee78827e58c5398b41254",
|
||||
"zh:3fbe76c7d957d20dfe3c8c0528b33084651f22a95be9e0452b658e0922916e2a",
|
||||
"zh:595671a05828cfe6c42ef73aac894ac39f81a52cc662a76f37eb74ebe04ddf75",
|
||||
"zh:5d76e8788d2af3e60daf8076babf763ec887480bbb9734baccccd8fcddf4f03e",
|
||||
"zh:676985afeaca6e67b22d60d43fd0ed7055763029ffebc3026089fe2fd3b4a288",
|
||||
"zh:69152ce6164ac999a640cff962ece45208270e1ac37c10dac484eeea5cf47275",
|
||||
"zh:6da0b15c05b81f947ec8e139bd81eeeb05c0d36eb5a967b985d0625c60998b40",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:888e14a24410d56b37212fbea373a3e0401d0ff8f8e4f4dd00ba8b29de9fed39",
|
||||
"zh:aa261925e8b152636a0886b3a2149864707632d836e98a88dacea6cfb6302082",
|
||||
"zh:ac10cefb4064b3bb63d4b0379624a416a45acf778eac0004466f726ead686196",
|
||||
"zh:b1a3c8b4d5b2dc9b510eac5e9e02665582862c24eb819ab74f44d3d880246d4f",
|
||||
"zh:c552e2fe5670b6d3ad9a5faf78e3a27197eeedbe2b13928d2c491fa509bc47c7",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.6.0"
|
||||
hashes = [
|
||||
"h1:R5Ucn26riKIEijcsiOMBR3uOAjuOMfI1x7XvH4P6B1w=",
|
||||
"zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d",
|
||||
"zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211",
|
||||
"zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829",
|
||||
"zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d",
|
||||
"zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17",
|
||||
"zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21",
|
||||
"zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839",
|
||||
"zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0",
|
||||
"zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c",
|
||||
"zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e",
|
||||
"zh:822c0a3bbada5e38099a379db8b2e339526843699627c3be3664cc3b3752bab7",
|
||||
"zh:af23af2f98a84695b25c8eba7028a81ad4aad63c44aefb79e01bbe2dc82e7f78",
|
||||
"zh:e36cac9960b7506d92925b667254322520966b9c3feb3ca6102e57a1fb9b1761",
|
||||
"zh:ffd1e096c1cc35de879c740a91918e9f06b627818a3cb4b1d87b829b54a6985f",
|
||||
]
|
||||
}
|
||||
|
@ -1,18 +0,0 @@
|
||||
module "adminer" {
|
||||
source = "./service"
|
||||
|
||||
name = "adminer"
|
||||
image = "adminer"
|
||||
|
||||
ingress = true
|
||||
service_port = 8080
|
||||
use_wesher = var.use_wesher
|
||||
|
||||
use_mysql = true
|
||||
use_postgres = true
|
||||
|
||||
resources = {
|
||||
cpu = 50
|
||||
memory = 50
|
||||
}
|
||||
}
|
services/backups/.terraform.lock.hcl (Normal file, generated, 21 lines)
@ -0,0 +1,21 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/nomad" {
|
||||
version = "1.4.16"
|
||||
hashes = [
|
||||
"h1:PQxNPNmMVOErxryTWIJwr22k95DTSODmgRylqjc2TjI=",
|
||||
"h1:tyfjD/maKzb0RxxD9KWgLnkJu9lnYziYsQgGw85Giz8=",
|
||||
"zh:0d4fbb7030d9caac3b123e60afa44f50c83cc2a983e1866aec7f30414abe7b0e",
|
||||
"zh:0db080228e07c72d6d8ca8c45249d6f97cd0189fce82a77abbdcd49a52e57572",
|
||||
"zh:0df88393271078533a217654b96f0672c60eb59570d72e6aefcb839eea87a7a0",
|
||||
"zh:2883b335bb6044b0db6a00e602d6926c047c7f330294a73a90d089f98b24d084",
|
||||
"zh:390158d928009a041b3a182bdd82376b50530805ae92be2b84ed7c3b0fa902a0",
|
||||
"zh:7169b8f8df4b8e9659c49043848fd5f7f8473d0471f67815e8b04980f827f5ef",
|
||||
"zh:9417ee1383b1edd137024882d7035be4dca51fb4f725ca00ed87729086ec1755",
|
||||
"zh:a22910b5a29eeab5610350700b4899267c1b09b66cf21f7e4d06afc61d425800",
|
||||
"zh:a6185c9cd7aa458cd81861058ba568b6411fbac344373a20155e20256f4a7557",
|
||||
"zh:b6260ca9f034df1b47905b4e2a9c33b67dbf77224a694d5b10fb09ae92ffad4c",
|
||||
"zh:d87c12a6a7768f2b6c2a59495c7dc00f9ecc52b1b868331d4c284f791e278a1e",
|
||||
]
|
||||
}
|
@ -18,7 +18,14 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
||||
}
|
||||
%{ endif ~}
|
||||
|
||||
%{ if batch_node != null ~}
|
||||
%{ if batch_node == null ~}
|
||||
constraint {
|
||||
attribute = "$${node.unique.name}"
|
||||
operator = "set_contains_any"
|
||||
# Only deploy to nodes running tasks to backup
|
||||
value = "n1,n2"
|
||||
}
|
||||
%{ else ~}
|
||||
constraint {
|
||||
attribute = "$${node.unique.name}"
|
||||
value = "${batch_node}"
|
||||
@ -31,9 +38,7 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
||||
mode = "bridge"
|
||||
|
||||
port "metrics" {
|
||||
%{~ if use_wesher ~}
|
||||
host_network = "wesher"
|
||||
%{~ endif ~}
|
||||
to = 8080
|
||||
}
|
||||
}
|
||||
@ -44,11 +49,6 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
||||
source = "all-volumes"
|
||||
}
|
||||
|
||||
ephemeral_disk {
|
||||
# Try to keep restic cache intact
|
||||
sticky = true
|
||||
}
|
||||
|
||||
service {
|
||||
name = "backup"
|
||||
provider = "nomad"
|
||||
@ -62,8 +62,6 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
||||
task "backup" {
|
||||
driver = "docker"
|
||||
|
||||
shutdown_delay = "5m"
|
||||
|
||||
volume_mount {
|
||||
volume = "all-volumes"
|
||||
destination = "/data"
|
||||
@ -71,123 +69,97 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
||||
}
|
||||
|
||||
config {
|
||||
image = "iamthefij/restic-scheduler:0.4.2"
|
||||
image = "iamthefij/resticscheduler:0.1.1"
|
||||
ports = ["metrics"]
|
||||
args = [
|
||||
"--push-gateway",
|
||||
"http://pushgateway.nomad:9091",
|
||||
%{ if batch_node != null ~}
|
||||
"-once",
|
||||
"-$${NOMAD_META_task}",
|
||||
"$${NOMAD_META_job_name}",
|
||||
"--snapshot",
|
||||
"$${NOMAD_META_snapshot}",
|
||||
"--push-gateway",
|
||||
"http://pushgateway.nomad:9091",
|
||||
%{ endif ~}
|
||||
"$${NOMAD_TASK_DIR}/node-jobs.hcl",
|
||||
]
|
||||
}
|
||||
|
||||
action "unlockenv" {
|
||||
command = "sh"
|
||||
args = ["-c", "/bin/restic-scheduler -once -unlock all $${NOMAD_TASK_DIR}/node-jobs.hcl"]
|
||||
}
|
||||
|
||||
action "unlocktmpl" {
|
||||
command = "/bin/restic-scheduler"
|
||||
args = ["-once", "-unlock", "all", "{{ env 'NOMAD_TASK_DIR' }}/node-jobs.hcl"]
|
||||
}
|
||||
|
||||
action "unlockhc" {
|
||||
command = "/bin/restic-scheduler"
|
||||
args = ["-once", "-unlock", "all", "/local/node-jobs.hcl"]
|
||||
}
|
||||
|
||||
action "backupall" {
|
||||
command = "/bin/restic-scheduler"
|
||||
args = ["-once", "-backup", "all", "/local/node-jobs.hcl"]
|
||||
}
|
||||
|
||||
action "backupallenv" {
|
||||
command = "sh"
|
||||
args = ["-c", "/bin/restic-scheduler -once -backup all $${NOMAD_TASK_DIR}/node-jobs.hcl"]
|
||||
}
|
||||
|
||||
env = {
|
||||
RCLONE_CHECKERS = "2"
|
||||
RCLONE_TRANSFERS = "2"
|
||||
RCLONE_FTP_CONCURRENCY = "5"
|
||||
RESTIC_CACHE_DIR = "$${NOMAD_ALLOC_DIR}/data"
|
||||
TZ = "America/Los_Angeles"
|
||||
"RCLONE_CHECKERS" = "2"
|
||||
"RCLONE_TRANSFERS" = "2"
|
||||
"RCLONE_FTP_CONCURRENCY" = "5"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
MYSQL_HOST=127.0.0.1
|
||||
MYSQL_PORT=3306
|
||||
{{ with nomadVar "secrets/mysql" }}
|
||||
# TODO: Move this to new mysql root pass path
|
||||
{{ with nomadVar "nomad/jobs" }}
|
||||
MYSQL_USER=root
|
||||
MYSQL_PASSWORD={{ .mysql_root_password }}
|
||||
{{ end -}}
|
||||
{{ with nomadVar "secrets/postgres" }}
|
||||
POSTGRES_HOST=127.0.0.1
|
||||
POSTGRES_PORT=5432
|
||||
POSTGRES_USER={{ .superuser }}
|
||||
POSTGRES_PASSWORD={{ .superuser_password }}
|
||||
{{ end -}}
|
||||
{{ with nomadVar (print "nomad/jobs/" (index (env "NOMAD_JOB_ID" | split "/") 0)) -}}
|
||||
{{ with nomadVar (print "nomad/jobs/" (env "NOMAD_JOB_ID")) -}}
|
||||
BACKUP_PASSPHRASE={{ .backup_passphrase }}
|
||||
RCLONE_FTP_HOST={{ .nas_ftp_host }}
|
||||
RCLONE_FTP_USER={{ .nas_ftp_user }}
|
||||
RCLONE_FTP_PASS={{ .nas_ftp_pass.Value | toJSON }}
|
||||
RCLONE_FTP_EXPLICIT_TLS=true
|
||||
RCLONE_FTP_NO_CHECK_CERTIFICATE=true
|
||||
AWS_ACCESS_KEY_ID={{ .nas_minio_access_key_id }}
|
||||
AWS_SECRET_ACCESS_KEY={{ .nas_minio_secret_access_key }}
|
||||
{{ end -}}
|
||||
EOF
|
||||
destination = "secrets/db.env"
|
||||
env = true
|
||||
}
|
||||
|
||||
|
||||
template {
|
||||
# Build jobs based on node
|
||||
data = <<EOF
|
||||
# Current node is {{ env "node.unique.name" }} {{ env "node.unique.id" }}
|
||||
|
||||
%{ for job_file in fileset(module_path, "jobs/*.hcl") ~}
|
||||
{{ range nomadService 1 "backups" "${trimsuffix(basename(job_file), ".hcl")}" -}}
|
||||
# ${trimsuffix(basename(job_file), ".hcl")} .Node {{ .Node }}
|
||||
{{ range nomadService "grafana" -}}
|
||||
# grafana .Node {{ .Node }}
|
||||
{{ if eq .Node (env "node.unique.id") -}}
|
||||
${file("${module_path}/${job_file}")}
|
||||
${file("${module_path}/jobs/grafana.hcl")}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{ end -}}
|
||||
{{ end -}}
|
||||
%{ endfor ~}
|
||||
{{ range nomadService "photoprism" -}}
|
||||
# photoprism .Node {{ .Node }}
|
||||
{{ if eq .Node (env "node.unique.id") -}}
|
||||
${file("${module_path}/jobs/photoprism.hcl")}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
# Dummy job to keep the task healthy on nodes without any stateful services
|
||||
job "Dummy" {
|
||||
schedule = "@daily"
|
||||
{{ range nomadService "lldap" -}}
|
||||
# lldap .Node {{ .Node }}
|
||||
{{ if eq .Node (env "node.unique.id") -}}
|
||||
${file("${module_path}/jobs/lldap.hcl")}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
config {
|
||||
repo = "/local/dummy-repo"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
}
|
||||
{{ range nomadService "sonarr" -}}
|
||||
# sonarr .Node {{ .Node }}
|
||||
{{ if eq .Node (env "node.unique.id") -}}
|
||||
${file("${module_path}/jobs/sonarr.hcl")}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
backup {
|
||||
paths = ["/local/node-jobs.hcl"]
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepLast = 1
|
||||
}
|
||||
}
|
||||
{{ range nomadService "nzbget" -}}
|
||||
# nzbget .Node {{ .Node }}
|
||||
{{ if eq .Node (env "node.unique.id") -}}
|
||||
${file("${module_path}/jobs/nzbget.hcl")}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
EOF
|
||||
destination = "local/node-jobs.hcl"
|
||||
}
|
||||
|
||||
resources {
|
||||
cpu = 50
|
||||
memory = 500
|
||||
memory = 256
|
||||
}
|
||||
}
|
||||
|
||||
@ -200,8 +172,8 @@ job "Dummy" {
|
||||
}
|
||||
|
||||
config {
|
||||
image = "iamthefij/stunnel:1.0.0"
|
||||
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||
image = "alpine:3.17"
|
||||
args = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
|
||||
}
|
||||
|
||||
resources {
|
||||
@ -209,6 +181,15 @@ job "Dummy" {
|
||||
memory = 100
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
set -e
|
||||
apk add stunnel
|
||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/start.sh"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
syslog = no
|
||||
@ -218,35 +199,22 @@ delay = yes
|
||||
[mysql_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:3306
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" }}
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
{{ end }}
|
||||
{{- end }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||
|
||||
[postgres_client]
|
||||
client = yes
|
||||
accept = 127.0.0.1:5432
|
||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "postgres-tls" }}
|
||||
connect = {{ .Address }}:{{ .Port }}
|
||||
{{ end }}
|
||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/postgres_stunnel_psk.txt
|
||||
EOF
|
||||
destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
|
||||
}
|
||||
|
||||
# TODO: Get psk for backup jobs despite multiple job declarations
|
||||
# Probably should use variable ACLs to grant each node's job access to this path
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/mysql/allowed_psks/backups" }}{{ .psk }}{{ end -}}
|
||||
{{- with nomadVar (print "nomad/jobs/" (env "NOMAD_JOB_ID")) }}{{ .mysql_stunnel_psk }}{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
|
||||
}
|
||||
|
||||
template {
|
||||
data = <<EOF
|
||||
{{- with nomadVar "secrets/postgres/allowed_psks/backups" }}{{ .psk }}{{ end -}}
|
||||
EOF
|
||||
destination = "$${NOMAD_SECRETS_DIR}/postgres_stunnel_psk.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
services/backups/backups.tf (Normal file, 28 lines)
@ -0,0 +1,28 @@
|
||||
resource "nomad_job" "backup" {
|
||||
hcl2 {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
jobspec = templatefile("${path.module}/backup.nomad", {
|
||||
module_path = path.module,
|
||||
batch_node = null,
|
||||
})
|
||||
}
|
||||
|
||||
resource "nomad_job" "backup-oneoff" {
|
||||
# TODO: Get list of nomad hosts dynamically
|
||||
for_each = toset(["n1", "n2"])
|
||||
# for_each = toset([
|
||||
# for node in data.consul_service.nomad.service :
|
||||
# node.node_name
|
||||
# ])
|
||||
|
||||
hcl2 {
|
||||
enabled = true
|
||||
}
|
||||
|
||||
jobspec = templatefile("${path.module}/backup.nomad", {
|
||||
module_path = path.module,
|
||||
batch_node = each.key,
|
||||
})
|
||||
}
|
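As for the TODO above, the commented-out lines already hint at the dynamic form: pull the node list from Consul instead of hard-coding n1 and n2. A sketch of that variant, assuming the hashicorp/consul provider is configured for this workspace (which this diff does not show):
data "consul_service" "nomad" {
  name = "nomad"
}

resource "nomad_job" "backup-oneoff" {
  # One one-off backup job per Nomad client node registered in Consul
  for_each = toset([
    for node in data.consul_service.nomad.service :
    node.node_name
  ])

  hcl2 {
    enabled = true
  }

  jobspec = templatefile("${path.module}/backup.nomad", {
    module_path = path.module,
    batch_node  = each.key,
  })
}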
@ -1,13 +1,9 @@
|
||||
job "grafana" {
|
||||
schedule = "@daily"
|
||||
schedule = "0 * * * *"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/grafana"
|
||||
repo = "rclone::ftp,env_auth:/nomad/grafana"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Create local grafana dir" {
|
||||
@ -30,14 +26,8 @@ job "grafana" {
|
||||
|
||||
backup {
|
||||
paths = ["/local/grafana"]
|
||||
|
||||
backup_opts {
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
# Because path is absolute
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
@ -2,12 +2,8 @@ job "lldap" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/lldap"
|
||||
repo = "rclone::ftp,env_auth:/nomad/lldap"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
task "Create local backup dir" {
|
||||
@ -30,14 +26,8 @@ job "lldap" {
|
||||
|
||||
backup {
|
||||
paths = ["/local/lldap"]
|
||||
|
||||
backup_opts {
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
# Because path is absolute
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
services/backups/jobs/nextcloud.hcl (Normal file, 35 lines)
@ -0,0 +1,35 @@
|
||||
job "nextcloud" {
|
||||
schedule = "0 * * * *"
|
||||
|
||||
config {
|
||||
repo = "rclone::ftp,env_auth:/nomad/nextcloud"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
}
|
||||
|
||||
mysql "Backup database" {
|
||||
hostname = env("MYSQL_HOST")
|
||||
port = env("MYSQL_PORT")
|
||||
database = env("MYSQL_DATABASE")
|
||||
username = env("MYSQL_USER")
|
||||
password = env("MYSQL_PASSWORD")
|
||||
no_tablespaces = true
|
||||
dump_to = "/local/dump.sql"
|
||||
}
|
||||
|
||||
backup {
|
||||
paths = ["/data/nextcloud"]
|
||||
# Because path is absolute
|
||||
restore_opts {
|
||||
Target = "/"
|
||||
}
|
||||
}
|
||||
|
||||
forget {
|
||||
KeepHourly = 24
|
||||
KeepDaily = 30
|
||||
KeepWeekly = 8
|
||||
KeepMonthly = 6
|
||||
KeepYearly = 2
|
||||
Prune = true
|
||||
}
|
||||
}
|
@ -2,12 +2,8 @@ job "nzbget" {
|
||||
schedule = "@daily"
|
||||
|
||||
config {
|
||||
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/nzbget"
|
||||
repo = "rclone::ftp,env_auth:/nomad/nzbget"
|
||||
passphrase = env("BACKUP_PASSPHRASE")
|
||||
|
||||
options {
|
||||
InsecureTls = true
|
||||
}
|
||||
}
|
||||
|
||||
backup {
|
||||
@ -17,14 +13,8 @@ job "nzbget" {
|
||||
# Queued nzb files
|
||||
"/data/media-write/Downloads/nzb",
|
||||
]
|
||||
|
||||
backup_opts {
|
||||
Host = "nomad"
|
||||
}
|
||||
|
||||
# Because path is absolute
|
||||
restore_opts {
|
||||
Host = ["nomad"]
|
||||
# Because path is absolute
|
||||
Target = "/"
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff.