Compare commits: diun-task-... → main (165 commits)
Commits (SHA1):

908d960f94, 32e34db160, 4a10b97749, b6605aa8bc, ae6fe918ef, ee6f7bd937, 36fa152553, 8e3e782c41,
78dba74acf, 85256adebe, 92559df998, d4a15d2fd6, 98ba1b60ad, 6854347d33, cdc03b9d93, 5e0393823d,
78320a8ea4, 03ce47320c, 8641bd50e1, 9d7a8029c1, 9c2bce3fab, c248edfc52, 1bc46d957a, 8866434590,
1c783dbdfe, f5a180f019, 98c547ebdf, fc5bce0757, 2a58439ab5, 79648879ab, 9a76c9efef, 52b0ec3bb6,
cf43d32d06, 03dc79c895, fafbb246ff, e99c5272cc, 64b58230e6, 95ca3f40d0, 534bad2a03, 58c483a051,
84d7a68088, 8e8dbc3e65, 77c7c6b36c, 505a6b5e8d, 1307864afc, c5743a2578, bd67c60575, 3e8c03904d,
408f526239, c478ba4278, 9ee660cb6d, 2235a00f3b, 1f8014e740, bc620987b7, 7477cb7227, 6906623fda,
5547241d11, 81093cedfb, 7b41d29eb8, 90b7740343, e88c7c250d, ed83ab0382, 3cfbda7a27, 85c626c96f,
634d63c26c, 205388f283, bdfde48bec, 9af55580e7, b9c35bf18f, e7f740a2d9, 57efee14e9, c711c25737,
24122c2a3e, 13121862ec, 28da3f425b, 2d59886378, da0f52dab3, beac302a53, 5edcb86e7e, 3dcd4c44b3,
e6653f6495, a9a919b8f2, cc66bfdbcb, b02050112e, d5c2a0d185, 6a3ae49d8e, 75ee09d7e6, 8b90aa0d74,
62e120ce51, 5fb510202d, 64a085ef80, f2f415aeac, bb291b1f01, 056eac976c, 198f96f3f7, 6b5adbdf39,
77ef4b4167, b35b8cecd5, b9dfeff6d8, 2ff954b4b5, 2528dafcc6, 0e168376b8, a16dc204fe, 93d340c182,
37ee67b2e6, 35dfeb3093, 0a2eace3dd, 6fe1b200f2, c5d5ab42b8, efe7864cc9, 9ba74ce698, 4fe3d46d5f,
cf8bde7920, bc87688f1a, 3491c1f679, 7b019e0787, 0f19e2433f, c01d45c7a2, d07afe2319, b025e4a87e,
9be16fef1f, c26da678b3, 6b9533ef71, 0bd995ec2b, 0d340f3349, bcad131aa7, cda2842f8f, 9544222961,
7bc4ae1f8b, 1a3c096b65, 25e533287d, 7e87002be2, ab6906e989, ca55209316, 1b49f015c5, eb25138675,
69a0f760b4, 3fcedaddb7, bb34b434b8, 36cdb8f41b, cdd4e9b5d5, f06e90ab0d, 2d733b278c, b218633c2c,
e21ec11eb5, d6f9c2a7e4, 891cfa7b2d, c11b8e157b, 0d208b7394, 9b347880cc, a0185d9642, f2f5f4407c,
0b3d3caff6, 52abd94a38, 0391fd95ad, df1ae60936, a2d33ac309, 1b48892172, 48a48bb080, bd2c5ca3db,
48074bdc39, 2f3fc87f12, 369802cacc, 0c3f98d5c3, b97cfb68ad
@@ -132,7 +132,7 @@
       "filename": "core/authelia.yml",
       "hashed_secret": "a32b08d97b1615dc27f58b6b17f67624c04e2c4f",
       "is_verified": false,
-      "line_number": 185,
+      "line_number": 201,
       "is_secret": false
     }
   ],
@@ -187,5 +187,5 @@
     }
   ]
   },
-  "generated_at": "2023-08-24T20:00:24Z"
+  "generated_at": "2024-08-30T18:12:43Z"
 }
.terraform.lock.hcl (generated) | 52 changes
@@ -2,39 +2,39 @@
 # Manual edits may be lost in future updates.

 provider "registry.terraform.io/hashicorp/nomad" {
-  version = "2.0.0"
+  version = "2.2.0"
   hashes = [
-    "h1:lIHIxA6ZmfyTGL3J9YIddhxlfit4ipSS09BLxkwo6L0=",
-    "zh:09b897d64db293f9a904a4a0849b11ec1e3fff5c638f734d82ae36d8dc044b72",
-    "zh:435cc106799290f64078ec24b6c59cb32b33784d609088638ed32c6d12121199",
-    "zh:7073444bd064e8c4ec115ca7d9d7f030cc56795c0a83c27f6668bba519e6849a",
+    "h1:BAjqzVkuXxHtRKG+l9unaZJPk2kWZpSTCEcQPRcl2so=",
+    "zh:052f909d25121e93dc799290216292fca67943ccde12ba515068b838a6ff8c66",
+    "zh:20e29aeb9989f7a1e04bb4093817c7acc4e1e737bb21a3066f3ea46f2001feff",
+    "zh:2326d101ef427599b72cce30c0e0c1d18ae783f1a897c20f2319fbf54bab0a61",
+    "zh:3420cbe4fd19cdc96d715d0ae8e79c272608023a76033bbf582c30637f6d570f",
+    "zh:41ec570f87f578f1c57655e2e4fbdb9932d94cf92dc9cd11828cccedf36dd4a4",
+    "zh:5f90dcc58e3356ffead82ea211ecb4a2d7094d3c2fbd14ff85527c3652a595a2",
+    "zh:64aaa48609d2db868fcfd347490df0e12c6c3fcb8e4f12908c5d52b1a0adf73f",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:79d238c35d650d2d83a439716182da63f3b2767e72e4cbd0b69cb13d9b1aebfc",
-    "zh:7ef5f49344278fe0bbc5447424e6aa5425ff1821d010d944a444d7fa2c751acf",
-    "zh:92179091638c8ba03feef371c4361a790190f9955caea1fa59de2055c701a251",
-    "zh:a8a34398851761368eb8e7c171f24e55efa6e9fdbb5c455f6dec34dc17f631bc",
-    "zh:b38fd5338625ebace5a4a94cea1a28b11bd91995d834e318f47587cfaf6ec599",
-    "zh:b71b273a2aca7ad5f1e07c767b25b5a888881ba9ca93b30044ccc39c2937f03c",
-    "zh:cd14357e520e0f09fb25badfb4f2ee37d7741afdc3ed47c7bcf54c1683772543",
-    "zh:e05e025f4bb95138c3c8a75c636e97cd7cfd2fc1525b0c8bd097db8c5f02df6e",
+    "zh:86b4923e10e6ba407d1d2aab83740b702058e8b01460af4f5f0e4008f40e492c",
+    "zh:ae89dcba33097af33a306344d20e4e25181f15dcc1a860b42db5b7199a97c6a6",
+    "zh:ce56d68cdfba60891765e94f9c0bf69eddb985d44d97db9f91874bea027f08e2",
+    "zh:e993bcde5dbddaedf3331e3014ffab904f98ab0f5e8b5d6082b7ca5083e0a2f1",
   ]
 }

 provider "registry.terraform.io/hashicorp/random" {
-  version = "3.5.1"
+  version = "3.6.0"
   hashes = [
-    "h1:VSnd9ZIPyfKHOObuQCaKfnjIHRtR7qTw19Rz8tJxm+k=",
-    "zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64",
-    "zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d",
-    "zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831",
-    "zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3",
-    "zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f",
+    "h1:R5Ucn26riKIEijcsiOMBR3uOAjuOMfI1x7XvH4P6B1w=",
+    "zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d",
+    "zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211",
+    "zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829",
+    "zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d",
+    "zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b",
-    "zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2",
-    "zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865",
-    "zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03",
-    "zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602",
-    "zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014",
+    "zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17",
+    "zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21",
+    "zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839",
+    "zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0",
+    "zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c",
+    "zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e",
   ]
 }
Makefile | 10 changes
@@ -87,6 +87,16 @@ apply:
		-auto-approve \
		-var "nomad_secret_id=$(shell jq -r .SecretID nomad_bootstrap.json)" \

+.PHONY: refresh
+refresh:
+	@terraform refresh \
+		-var "nomad_secret_id=$(shell jq -r .SecretID nomad_bootstrap.json)" \
+
+.PHONY: destroy
+destroy:
+	@terraform destroy \
+		-var "nomad_secret_id=$(shell jq -r .SecretID nomad_bootstrap.json)" \
+
 .PHONY: clean
 clean:
	env VIRTUAL_ENV=$(VENV) $(VENV)/bin/ansible-playbook -vv \
@@ -1,6 +1,6 @@
 resource "nomad_acl_policy" "anon_policy" {
   name        = "anonymous"
-  description = "Anon RO"
+  description = "Anon read only"
   rules_hcl   = file("${path.module}/nomad-anon-policy.hcl")
 }

@@ -1,62 +1,72 @@
 ---
 all:
-  children:
-    servers:
-      hosts:
-        n1.thefij:
-          nomad_node_role: both
-          # nomad_meta:
-          #   hw_transcode.device: /dev/dri
-          #   hw_transcode.type: intel
-          nfs_mounts:
-            - src: 10.50.250.2:/srv/volumes
-              path: /srv/volumes/moxy
-              opts: proto=tcp,rw
-          nomad_unique_host_volumes:
-            - name: mysql-data
-              path: /srv/volumes/mysql
-              owner: "999"
-              group: "100"
-              mode: "0755"
-              read_only: false
-            - name: postgres-data
-              path: /srv/volumes/postgres
-              owner: "999"
-              group: "999"
-              mode: "0755"
-              read_only: false
-        n2.thefij:
-          nfs_mounts:
-            - src: 10.50.250.2:/srv/volumes
-              path: /srv/volumes/moxy
-              opts: proto=tcp,rw
-          nomad_node_class: ingress
-          nomad_node_role: both
-          nomad_unique_host_volumes:
-            - name: nextcloud-data
-              path: /srv/volumes/nextcloud
-              owner: "root"
-              group: "bin"
-              mode: "0755"
-              read_only: false
-            - name: gitea-data
-              path: /srv/volumes/gitea
-              owner: "root"
-              group: "bin"
-              mode: "0755"
-              read_only: false
-            - name: sonarr-data
-              path: /srv/volumes/sonarr
-              owner: "root"
-              group: "bin"
-              mode: "0755"
-              read_only: false
-        pi4:
-          nomad_node_role: both
-          nomad_meta:
-            hw_transcode.device: /dev/video11
-            hw_transcode.type: raspberry
+  hosts:
+    n1.thefij:
+      nomad_node_class: ingress
+      nomad_reserved_memory: 1024
+      # nomad_meta:
+      #   hw_transcode.device: /dev/dri
+      #   hw_transcode.type: intel
+      nfs_mounts:
+        - src: 10.50.250.2:/srv/volumes
+          path: /srv/volumes/moxy
+          opts: proto=tcp,rw
+      nomad_unique_host_volumes:
+        - name: mysql-data
+          path: /srv/volumes/mysql
+          owner: "999"
+          group: "100"
+          mode: "0755"
+          read_only: false
+        - name: postgres-data
+          path: /srv/volumes/postgres
+          owner: "999"
+          group: "999"
+          mode: "0755"
+          read_only: false
+    # n2.thefij:
+    #   nomad_node_class: ingress
+    #   nomad_reserved_memory: 1024
+    #   nfs_mounts:
+    #     - src: 10.50.250.2:/srv/volumes
+    #       path: /srv/volumes/moxy
+    #       opts: proto=tcp,rw
+    #   nomad_unique_host_volumes:
+    #     - name: nextcloud-data
+    #       path: /srv/volumes/nextcloud
+    #       owner: "root"
+    #       group: "bin"
+    #       mode: "0755"
+    #       read_only: false
+    pi4:
+      nomad_node_class: ingress
+      nomad_reserved_memory: 512
+      nomad_meta:
+        hw_transcode.device: /dev/video11
+        hw_transcode.type: raspberry
+    qnomad.thefij:
+      ansible_host: 192.168.2.234
+      nomad_reserved_memory: 1024
+      # This VM uses a non-standard interface
+      nomad_network_interface: ens3

 nomad_instances:
-  children:
-    servers: {}
+  vars:
+    nomad_network_interface: eth0
+  children:
+    nomad_servers: {}
+    nomad_clients: {}
+nomad_servers:
+  hosts:
+    nonopi.thefij:
+      ansible_host: 192.168.2.170
+    n1.thefij: {}
+    # n2.thefij: {}
+    pi4: {}
+    # qnomad.thefij: {}
+nomad_clients:
+  hosts:
+    n1.thefij: {}
+    # n2.thefij: {}
+    pi4: {}
+    # qnomad.thefij: {}
@@ -14,8 +14,14 @@
         state: restarted
       become: true

+    - name: Start Docker
+      systemd:
+        name: docker
+        state: started
+      become: true
+
     - name: Start Nomad
       systemd:
         name: nomad
-        state: stopped
+        state: started
       become: true
@@ -1,6 +1,6 @@
 ---
 - name: Recover Nomad
-  hosts: nomad_instances
+  hosts: nomad_servers
   any_errors_fatal: true

   tasks:
@@ -14,7 +14,7 @@
       line: "nameserver {{ non_nomad_dns }}"

 - name: Install Docker
-  hosts: nomad_instances
+  hosts: nomad_clients
   become: true
   vars:
     docker_architecture_map:
@@ -44,7 +44,7 @@
     #     state: present

 - name: Create NFS mounts
-  hosts: nomad_instances
+  hosts: nomad_clients
   become: true
   vars:
     shared_nfs_mounts:
@@ -56,6 +56,10 @@
         path: /srv/volumes/media-write
         opts: proto=tcp,port=2049,rw

+      - src: 192.168.2.10:/Overflow
+        path: /srv/volumes/nas-overflow
+        opts: proto=tcp,port=2049,rw
+
       - src: 192.168.2.10:/Photos
         path: /srv/volumes/photos
         opts: proto=tcp,port=2049,rw
@@ -97,6 +101,12 @@
         group: "root"
         mode: "0755"
         read_only: false
+      - name: media-overflow-write
+        path: /srv/volumes/nas-overflow/Media
+        owner: "root"
+        group: "root"
+        mode: "0755"
+        read_only: false
       - name: media-downloads
         path: /srv/volumes/media-write/Downloads
         read_only: false
@@ -112,12 +122,27 @@
       - name: nzbget-config
         path: /srv/volumes/nas-container/nzbget
         read_only: false
+      - name: sonarr-config
+        path: /srv/volumes/nas-container/sonarr
+        read_only: false
       - name: lidarr-config
         path: /srv/volumes/nas-container/lidarr
         read_only: false
+      - name: radarr-config
+        path: /srv/volumes/nas-container/radarr
+        read_only: false
       - name: bazarr-config
         path: /srv/volumes/nas-container/bazarr
         read_only: false
+      - name: gitea-data
+        path: /srv/volumes/nas-container/gitea
+        read_only: false
+      - name: ytdl-web
+        path: /srv/volumes/nas-container/ytdl-web
+        read_only: false
+      - name: christmas-community
+        path: /srv/volumes/nas-container/christmas-community
+        read_only: false
       - name: all-volumes
         path: /srv/volumes
         owner: "root"
@@ -128,9 +153,10 @@
   roles:
     - name: ansible-nomad
       vars:
-        nomad_version: "1.6.1-1"
+        nomad_version: "1.9.3-1"
         nomad_install_upgrade: true
         nomad_allow_purge_config: true
+        nomad_node_role: "{% if 'nomad_clients' in group_names %}{% if 'nomad_servers' in group_names %}both{% else %}client{% endif %}{% else %}server{% endif %}"

         # Where nomad gets installed to
         nomad_bin_dir: /usr/bin
@@ -184,7 +210,8 @@
         nomad_bind_address: 0.0.0.0

         # Default interface for binding tasks
-        nomad_network_interface: eth0
+        # This is now set at the inventory level
+        # nomad_network_interface: eth0

         # Create networks for binding task ports
         nomad_host_networks:
@@ -203,7 +230,7 @@
           enabled: true

 - name: Bootstrap Nomad ACLs and scheduler
-  hosts: nomad_instances
+  hosts: nomad_servers

   tasks:
     - name: Start Nomad
@@ -233,6 +260,7 @@
       run_once: true
       ignore_errors: true
       register: bootstrap_result
+      changed_when: bootstrap_result is succeeded

     - name: Save bootstrap result
       copy:
@@ -264,13 +292,15 @@
           - list
       environment:
         NOMAD_TOKEN: "{{ read_secretid.stdout }}"
-      run_once: true
       register: policies
+      run_once: true
+      changed_when: false

     - name: Copy policy
       copy:
         src: ../acls/nomad-anon-policy.hcl
         dest: /tmp/anonymous.policy.hcl
+      delegate_to: "{{ play_hosts[0] }}"
       run_once: true
       register: anon_policy

@@ -281,7 +311,7 @@
           - acl
           - policy
           - apply
-          - -description="Anon read only"
+          - -description=Anon read only
           - anonymous
           - /tmp/anonymous.policy.hcl
       environment:
@@ -290,6 +320,18 @@
       delegate_to: "{{ play_hosts[0] }}"
       run_once: true

+    - name: Read scheduler config
+      command:
+        argv:
+          - nomad
+          - operator
+          - scheduler
+          - get-config
+          - -json
+      run_once: true
+      register: scheduler_config
+      changed_when: false
+
     - name: Enable service scheduler preemption
       command:
         argv:
@@ -297,12 +339,24 @@
           - operator
           - scheduler
           - set-config
-          - -preempt-system-scheduler=true
           - -preempt-service-scheduler=true
       environment:
         NOMAD_TOKEN: "{{ read_secretid.stdout }}"
-      delegate_to: "{{ play_hosts[0] }}"
       run_once: true
+      when: (scheduler_config.stdout | from_json)["SchedulerConfig"]["PreemptionConfig"]["ServiceSchedulerEnabled"] is false

+    - name: Enable system scheduler preemption
+      command:
+        argv:
+          - nomad
+          - operator
+          - scheduler
+          - set-config
+          - -preempt-system-scheduler=true
+      environment:
+        NOMAD_TOKEN: "{{ read_secretid.stdout }}"
+      run_once: true
+      when: (scheduler_config.stdout | from_json)["SchedulerConfig"]["PreemptionConfig"]["SystemSchedulerEnabled"] is false
+
 #    - name: Set up Nomad backend and roles in Vault
 #      community.general.terraform:
@@ -9,8 +9,6 @@ nomad/jobs/authelia:
   db_user: VALUE
   email_sender: VALUE
   jwt_secret: VALUE
-  lldap_admin_password: VALUE
-  lldap_admin_user: VALUE
   oidc_clients: VALUE
   oidc_hmac_secret: VALUE
   oidc_issuer_certificate_chain: VALUE
@@ -24,21 +22,29 @@ nomad/jobs/backup:
   nas_ftp_host: VALUE
   nas_ftp_pass: VALUE
   nas_ftp_user: VALUE
+  nas_minio_access_key_id: VALUE
+  nas_minio_secret_access_key: VALUE
 nomad/jobs/backup-oneoff-n1:
   backup_passphrase: VALUE
   nas_ftp_host: VALUE
   nas_ftp_pass: VALUE
   nas_ftp_user: VALUE
+  nas_minio_access_key_id: VALUE
+  nas_minio_secret_access_key: VALUE
 nomad/jobs/backup-oneoff-n2:
   backup_passphrase: VALUE
   nas_ftp_host: VALUE
   nas_ftp_pass: VALUE
   nas_ftp_user: VALUE
+  nas_minio_access_key_id: VALUE
+  nas_minio_secret_access_key: VALUE
 nomad/jobs/backup-oneoff-pi4:
   backup_passphrase: VALUE
   nas_ftp_host: VALUE
   nas_ftp_pass: VALUE
   nas_ftp_user: VALUE
+  nas_minio_access_key_id: VALUE
+  nas_minio_secret_access_key: VALUE
 nomad/jobs/bazarr:
   db_name: VALUE
   db_pass: VALUE
@@ -61,6 +67,7 @@ nomad/jobs/git:
   db_name: VALUE
   db_pass: VALUE
   db_user: VALUE
+  oidc_secret: VALUE
   secret_key: VALUE
   smtp_sender: VALUE
 nomad/jobs/grafana:
@@ -83,17 +90,15 @@ nomad/jobs/immich:
   db_name: VALUE
   db_pass: VALUE
   db_user: VALUE
-nomad/jobs/ipdvr/radarr:
-  db_pass: VALUE
-  db_user: VALUE
+nomad/jobs/lego:
+  acme_email: VALUE
+  domain_lego_dns: VALUE
+  usersfile: VALUE
 nomad/jobs/lidarr:
   db_name: VALUE
   db_pass: VALUE
   db_user: VALUE
 nomad/jobs/lldap:
-  admin_email: VALUE
-  admin_password: VALUE
-  admin_user: VALUE
   db_name: VALUE
   db_pass: VALUE
   db_user: VALUE
@@ -111,21 +116,38 @@ nomad/jobs/photoprism:
   db_name: VALUE
   db_pass: VALUE
   db_user: VALUE
+  oidc_secret: VALUE
 nomad/jobs/postgres-server:
   superuser: VALUE
   superuser_pass: VALUE
+nomad/jobs/radarr:
+  db_name: VALUE
+  db_pass: VALUE
+  db_user: VALUE
 nomad/jobs/redis-authelia:
   allowed_psks: VALUE
 nomad/jobs/redis-blocky:
   allowed_psks: VALUE
 nomad/jobs/rediscommander:
   redis_stunnel_psk: VALUE
+nomad/jobs/sonarr:
+  db_name: VALUE
+  db_pass: VALUE
+  db_user: VALUE
 nomad/jobs/traefik:
-  acme_email: VALUE
-  domain_lego_dns: VALUE
+  external: VALUE
   usersfile: VALUE
+nomad/jobs/unifi-traffic-route-ips:
+  unifi_password: VALUE
+  unifi_username: VALUE
+nomad/jobs/wishlist:
+  guest_password: VALUE
 nomad/oidc:
   secret: VALUE
+secrets/ldap:
+  admin_email: VALUE
+  admin_password: VALUE
+  admin_user: VALUE
 secrets/mysql:
   mysql_root_password: VALUE
 secrets/postgres:
@ -44,6 +44,11 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
|||||||
source = "all-volumes"
|
source = "all-volumes"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ephemeral_disk {
|
||||||
|
# Try to keep restic cache intact
|
||||||
|
sticky = true
|
||||||
|
}
|
||||||
|
|
||||||
service {
|
service {
|
||||||
name = "backup"
|
name = "backup"
|
||||||
provider = "nomad"
|
provider = "nomad"
|
||||||
@ -57,6 +62,8 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
|||||||
task "backup" {
|
task "backup" {
|
||||||
driver = "docker"
|
driver = "docker"
|
||||||
|
|
||||||
|
shutdown_delay = "5m"
|
||||||
|
|
||||||
volume_mount {
|
volume_mount {
|
||||||
volume = "all-volumes"
|
volume = "all-volumes"
|
||||||
destination = "/data"
|
destination = "/data"
|
||||||
@ -64,26 +71,53 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
|
|||||||
}
|
}
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "iamthefij/resticscheduler:0.2.0"
|
image = "iamthefij/restic-scheduler:0.4.2"
|
||||||
ports = ["metrics"]
|
ports = ["metrics"]
|
||||||
args = [
|
args = [
|
||||||
|
"--push-gateway",
|
||||||
|
"http://pushgateway.nomad:9091",
|
||||||
%{ if batch_node != null ~}
|
%{ if batch_node != null ~}
|
||||||
"-once",
|
"-once",
|
||||||
"-$${NOMAD_META_task}",
|
"-$${NOMAD_META_task}",
|
||||||
"$${NOMAD_META_job_name}",
|
"$${NOMAD_META_job_name}",
|
||||||
"--snapshot",
|
"--snapshot",
|
||||||
"$${NOMAD_META_snapshot}",
|
"$${NOMAD_META_snapshot}",
|
||||||
"--push-gateway",
|
|
||||||
"http://pushgateway.nomad:9091",
|
|
||||||
%{ endif ~}
|
%{ endif ~}
|
||||||
"$${NOMAD_TASK_DIR}/node-jobs.hcl",
|
"$${NOMAD_TASK_DIR}/node-jobs.hcl",
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
action "unlockenv" {
|
||||||
|
command = "sh"
|
||||||
|
args = ["-c", "/bin/restic-scheduler -once -unlock all $${NOMAD_TASK_DIR}/node-jobs.hcl"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "unlocktmpl" {
|
||||||
|
command = "/bin/restic-scheduler"
|
||||||
|
args = ["-once", "-unlock", "all", "{{ env 'NOMAD_TASK_DIR' }}/node-jobs.hcl"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "unlockhc" {
|
||||||
|
command = "/bin/restic-scheduler"
|
||||||
|
args = ["-once", "-unlock", "all", "/local/node-jobs.hcl"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "backupall" {
|
||||||
|
command = "/bin/restic-scheduler"
|
||||||
|
args = ["-once", "-backup", "all", "/local/node-jobs.hcl"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "backupallenv" {
|
||||||
|
command = "sh"
|
||||||
|
args = ["-c", "/bin/restic-scheduler -once -backup all $${NOMAD_TASK_DIR}/node-jobs.hcl"]
|
||||||
|
}
|
||||||
|
|
||||||
env = {
|
env = {
|
||||||
"RCLONE_CHECKERS" = "2"
|
RCLONE_CHECKERS = "2"
|
||||||
"RCLONE_TRANSFERS" = "2"
|
RCLONE_TRANSFERS = "2"
|
||||||
"RCLONE_FTP_CONCURRENCY" = "5"
|
RCLONE_FTP_CONCURRENCY = "5"
|
||||||
|
RESTIC_CACHE_DIR = "$${NOMAD_ALLOC_DIR}/data"
|
||||||
|
TZ = "America/Los_Angeles"
|
||||||
}
|
}
|
||||||
|
|
||||||
template {
|
template {
|
||||||
@ -107,13 +141,14 @@ RCLONE_FTP_USER={{ .nas_ftp_user }}
|
|||||||
RCLONE_FTP_PASS={{ .nas_ftp_pass.Value | toJSON }}
|
RCLONE_FTP_PASS={{ .nas_ftp_pass.Value | toJSON }}
|
||||||
RCLONE_FTP_EXPLICIT_TLS=true
|
RCLONE_FTP_EXPLICIT_TLS=true
|
||||||
RCLONE_FTP_NO_CHECK_CERTIFICATE=true
|
RCLONE_FTP_NO_CHECK_CERTIFICATE=true
|
||||||
|
AWS_ACCESS_KEY_ID={{ .nas_minio_access_key_id }}
|
||||||
|
AWS_SECRET_ACCESS_KEY={{ .nas_minio_secret_access_key }}
|
||||||
{{ end -}}
|
{{ end -}}
|
||||||
EOF
|
EOF
|
||||||
destination = "secrets/db.env"
|
destination = "secrets/db.env"
|
||||||
env = true
|
env = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
template {
|
template {
|
||||||
# Build jobs based on node
|
# Build jobs based on node
|
||||||
data = <<EOF
|
data = <<EOF
|
||||||
@ -128,13 +163,31 @@ ${file("${module_path}/${job_file}")}
|
|||||||
{{ end -}}
|
{{ end -}}
|
||||||
{{ end -}}
|
{{ end -}}
|
||||||
%{ endfor ~}
|
%{ endfor ~}
|
||||||
|
|
||||||
|
# Dummy job to keep task healthy on node without any stateful services
|
||||||
|
job "Dummy" {
|
||||||
|
schedule = "@daily"
|
||||||
|
|
||||||
|
config {
|
||||||
|
repo = "/local/dummy-repo"
|
||||||
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
}
|
||||||
|
|
||||||
|
backup {
|
||||||
|
paths = ["/local/node-jobs.hcl"]
|
||||||
|
}
|
||||||
|
|
||||||
|
forget {
|
||||||
|
KeepLast = 1
|
||||||
|
}
|
||||||
|
}
|
||||||
EOF
|
EOF
|
||||||
destination = "local/node-jobs.hcl"
|
destination = "local/node-jobs.hcl"
|
||||||
}
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
cpu = 50
|
cpu = 50
|
||||||
memory = 256
|
memory = 500
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -147,8 +200,8 @@ ${file("${module_path}/${job_file}")}
|
|||||||
}
|
}
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "alpine:3.17"
|
image = "iamthefij/stunnel:1.0.0"
|
||||||
args = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
|
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||||
}
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
@ -156,15 +209,6 @@ ${file("${module_path}/${job_file}")}
|
|||||||
memory = 100
|
memory = 100
|
||||||
}
|
}
|
||||||
|
|
||||||
template {
|
|
||||||
data = <<EOF
|
|
||||||
set -e
|
|
||||||
apk add stunnel
|
|
||||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
|
||||||
EOF
|
|
||||||
destination = "$${NOMAD_TASK_DIR}/start.sh"
|
|
||||||
}
|
|
||||||
|
|
||||||
template {
|
template {
|
||||||
data = <<EOF
|
data = <<EOF
|
||||||
syslog = no
|
syslog = no
|
||||||
|
@ -8,7 +8,7 @@ resource "nomad_job" "backup" {
|
|||||||
|
|
||||||
resource "nomad_job" "backup-oneoff" {
|
resource "nomad_job" "backup-oneoff" {
|
||||||
# TODO: Get list of nomad hosts dynamically
|
# TODO: Get list of nomad hosts dynamically
|
||||||
for_each = toset(["n1", "n2", "pi4"])
|
for_each = toset(["n1", "pi4"])
|
||||||
# for_each = toset([
|
# for_each = toset([
|
||||||
# for node in data.consul_service.nomad.service :
|
# for node in data.consul_service.nomad.service :
|
||||||
# node.node_name
|
# node.node_name
|
||||||
@ -22,7 +22,9 @@ resource "nomad_job" "backup-oneoff" {
|
|||||||
}
|
}
|
||||||
|
|
||||||
locals {
|
locals {
|
||||||
all_job_ids = toset(flatten([[for job in resource.nomad_job.backup-oneoff : job.id], [resource.nomad_job.backup.id]]))
|
# NOTE: This can't be dynamic in first deploy since these values are not known
|
||||||
|
# all_job_ids = toset(flatten([[for job in resource.nomad_job.backup-oneoff : job.id], [resource.nomad_job.backup.id]]))
|
||||||
|
all_job_ids = toset(["backup", "backup-oneoff-n1", "backup-oneoff-pi4"])
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "nomad_acl_policy" "secrets_mysql" {
|
resource "nomad_acl_policy" "secrets_mysql" {
|
||||||
|
@ -2,8 +2,12 @@ job "authelia" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/authelia"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/authelia"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Create local authelia dir" {
|
task "Create local authelia dir" {
|
||||||
backups/jobs/git.hcl | 57 (new file)
@@ -0,0 +1,57 @@
+job "git" {
+  schedule = "@daily"
+
+  config {
+    repo       = "s3://backups-minio.agnosticfront.thefij:8443/nomad/gitea"
+    passphrase = env("BACKUP_PASSPHRASE")
+
+    options {
+      InsecureTls = true
+    }
+  }
+
+  task "Create local gitea dir" {
+    pre_script {
+      on_backup = "mkdir -p /local/gitea"
+    }
+  }
+
+  task "Backup database" {
+    mysql "Backup database" {
+      hostname       = env("MYSQL_HOST")
+      port           = env("MYSQL_PORT")
+      database       = "gitea"
+      username       = env("MYSQL_USER")
+      password       = env("MYSQL_PASSWORD")
+      no_tablespaces = true
+      dump_to        = "/local/gitea/dump.sql"
+    }
+  }
+
+  backup {
+    paths = [
+      "/local/gitea",
+      "/data/nas-container/gitea",
+    ]
+
+    backup_opts {
+      Host = "nomad"
+    }
+
+    restore_opts {
+      Host = ["nomad"]
+      # Because path is absolute
+      Target = "/"
+    }
+  }
+
+  forget {
+    KeepLast    = 2
+    KeepHourly  = 24
+    KeepDaily   = 30
+    KeepWeekly  = 8
+    KeepMonthly = 6
+    KeepYearly  = 2
+    Prune       = true
+  }
+}
@ -2,8 +2,12 @@ job "grafana" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/grafana"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/grafana"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Create local grafana dir" {
|
task "Create local grafana dir" {
|
||||||
|
@ -2,8 +2,12 @@ job "lidarr" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/lidarr"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/lidarr"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Backup main database" {
|
task "Backup main database" {
|
||||||
@ -34,7 +38,11 @@ job "lidarr" {
|
|||||||
paths = ["/data/nas-container/lidarr"]
|
paths = ["/data/nas-container/lidarr"]
|
||||||
|
|
||||||
backup_opts {
|
backup_opts {
|
||||||
Exclude = ["lidarr_backup_*.zip"]
|
Exclude = [
|
||||||
|
"lidarr_backup_*.zip",
|
||||||
|
"/data/nas-container/lidarr/MediaCover",
|
||||||
|
"/data/nas-container/lidarr/logs",
|
||||||
|
]
|
||||||
Host = "nomad"
|
Host = "nomad"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2,8 +2,12 @@ job "lldap" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/lldap"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/lldap"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Create local backup dir" {
|
task "Create local backup dir" {
|
||||||
|
@ -2,8 +2,12 @@ job "nzbget" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/nzbget"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/nzbget"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
backup {
|
backup {
|
||||||
|
@ -2,8 +2,12 @@ job "photoprism" {
|
|||||||
schedule = "10 * * * *"
|
schedule = "10 * * * *"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/photoprism"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/photoprism"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Create local photoprism dir" {
|
task "Create local photoprism dir" {
|
||||||
@ -32,6 +36,9 @@ job "photoprism" {
|
|||||||
|
|
||||||
backup_opts {
|
backup_opts {
|
||||||
Host = "nomad"
|
Host = "nomad"
|
||||||
|
Exclude = [
|
||||||
|
"/data/nas-container/photoprism/cache",
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
restore_opts {
|
restore_opts {
|
||||||
backups/jobs/radarr.hcl | 64 (new file)
@@ -0,0 +1,64 @@
+job "radarr" {
+  schedule = "@daily"
+
+  config {
+    repo       = "s3://backups-minio.agnosticfront.thefij:8443/nomad/radarr"
+    passphrase = env("BACKUP_PASSPHRASE")
+
+    options {
+      InsecureTls = true
+    }
+  }
+
+  task "Backup main database" {
+    postgres "Backup database" {
+      hostname       = env("POSTGRES_HOST")
+      port           = env("POSTGRES_PORT")
+      username       = env("POSTGRES_USER")
+      password       = env("POSTGRES_PASSWORD")
+      database       = "radarr"
+      no_tablespaces = true
+      dump_to        = "/data/nas-container/radarr/Backups/dump-radarr.sql"
+    }
+  }
+
+  task "Backup logs database" {
+    postgres "Backup database" {
+      hostname       = env("POSTGRES_HOST")
+      port           = env("POSTGRES_PORT")
+      username       = env("POSTGRES_USER")
+      password       = env("POSTGRES_PASSWORD")
+      database       = "radarr-logs"
+      no_tablespaces = true
+      dump_to        = "/data/nas-container/radarr/Backups/dump-radarr-logs.sql"
+    }
+  }
+
+  backup {
+    paths = ["/data/nas-container/radarr"]
+
+    backup_opts {
+      Exclude = [
+        "radarr_backup_*.zip",
+        "/data/nas-container/radarr/MediaCover",
+        "/data/nas-container/radarr/logs",
+      ]
+      Host = "nomad"
+    }
+
+    restore_opts {
+      Host = ["nomad"]
+      # Because path is absolute
+      Target = "/"
+    }
+  }
+
+  forget {
+    KeepLast    = 2
+    KeepDaily   = 30
+    KeepWeekly  = 8
+    KeepMonthly = 6
+    KeepYearly  = 2
+    Prune       = true
+  }
+}
@ -2,8 +2,12 @@ job "sabnzbd" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/sabnzbd"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/sabnzbd"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
backup {
|
backup {
|
||||||
|
@ -2,30 +2,46 @@ job "sonarr" {
|
|||||||
schedule = "@daily"
|
schedule = "@daily"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
repo = "rclone::ftp,env_auth:/nomad/sonarr"
|
repo = "s3://backups-minio.agnosticfront.thefij:8443/nomad/sonarr"
|
||||||
passphrase = env("BACKUP_PASSPHRASE")
|
passphrase = env("BACKUP_PASSPHRASE")
|
||||||
|
|
||||||
|
options {
|
||||||
|
InsecureTls = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Backup main database" {
|
task "Backup main database" {
|
||||||
sqlite "Backup database" {
|
postgres "Backup database" {
|
||||||
path = "/data/sonarr/sonarr.db"
|
hostname = env("POSTGRES_HOST")
|
||||||
dump_to = "/data/sonarr/Backups/sonarr.db.bak"
|
port = env("POSTGRES_PORT")
|
||||||
|
username = env("POSTGRES_USER")
|
||||||
|
password = env("POSTGRES_PASSWORD")
|
||||||
|
database = "sonarr"
|
||||||
|
no_tablespaces = true
|
||||||
|
dump_to = "/data/nas-container/sonarr/Backups/dump-sonarr.sql"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "Backup logs database" {
|
task "Backup logs database" {
|
||||||
sqlite "Backup database" {
|
postgres "Backup database" {
|
||||||
path = "/data/sonarr/logs.db"
|
hostname = env("POSTGRES_HOST")
|
||||||
dump_to = "/data/sonarr/Backups/logs.db.bak"
|
port = env("POSTGRES_PORT")
|
||||||
|
username = env("POSTGRES_USER")
|
||||||
|
password = env("POSTGRES_PASSWORD")
|
||||||
|
database = "sonarr-logs"
|
||||||
|
no_tablespaces = true
|
||||||
|
dump_to = "/data/nas-container/sonarr/Backups/dump-sonarr-logs.sql"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
backup {
|
backup {
|
||||||
paths = ["/data/sonarr"]
|
paths = ["/data/nas-container/sonarr"]
|
||||||
|
|
||||||
backup_opts {
|
backup_opts {
|
||||||
Exclude = [
|
Exclude = [
|
||||||
"sonarr_backup_*.zip",
|
"sonarr_backup_*.zip",
|
||||||
|
"/data/nas-container/sonarr/MediaCover",
|
||||||
|
"/data/nas-container/sonarr/logs",
|
||||||
"*.db",
|
"*.db",
|
||||||
"*.db-shm",
|
"*.db-shm",
|
||||||
"*.db-wal",
|
"*.db-wal",
|
||||||
core/.terraform.lock.hcl (generated) | 52 changes
@@ -2,39 +2,39 @@
 # Manual edits may be lost in future updates.

 provider "registry.terraform.io/hashicorp/nomad" {
-  version = "2.0.0"
+  version = "2.1.1"
   hashes = [
-    "h1:lIHIxA6ZmfyTGL3J9YIddhxlfit4ipSS09BLxkwo6L0=",
-    "zh:09b897d64db293f9a904a4a0849b11ec1e3fff5c638f734d82ae36d8dc044b72",
-    "zh:435cc106799290f64078ec24b6c59cb32b33784d609088638ed32c6d12121199",
-    "zh:7073444bd064e8c4ec115ca7d9d7f030cc56795c0a83c27f6668bba519e6849a",
+    "h1:liQBgBXfQEYmwpoGZUfSsu0U0t/nhvuRZbMhaMz7wcQ=",
+    "zh:28bc6922e8a21334568410760150d9d413d7b190d60b5f0b4aab2f4ef52efeeb",
+    "zh:2d4283740e92ce1403875486cd5ff2c8acf9df28c190873ab4d769ce37db10c1",
+    "zh:457e16d70075eae714a7df249d3ba42c2176f19b6750650152c56f33959028d9",
+    "zh:49ee88371e355c00971eefee6b5001392431b47b3e760a5c649dda76f59fb8fa",
+    "zh:614ad3bf07155ed8a5ced41dafb09042afbd1868490a379654b3e970def8e33d",
+    "zh:75be7199d76987e7549e1f27439922973d1bf27353b44a593bfbbc2e3b9f698f",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:79d238c35d650d2d83a439716182da63f3b2767e72e4cbd0b69cb13d9b1aebfc",
-    "zh:7ef5f49344278fe0bbc5447424e6aa5425ff1821d010d944a444d7fa2c751acf",
-    "zh:92179091638c8ba03feef371c4361a790190f9955caea1fa59de2055c701a251",
-    "zh:a8a34398851761368eb8e7c171f24e55efa6e9fdbb5c455f6dec34dc17f631bc",
-    "zh:b38fd5338625ebace5a4a94cea1a28b11bd91995d834e318f47587cfaf6ec599",
-    "zh:b71b273a2aca7ad5f1e07c767b25b5a888881ba9ca93b30044ccc39c2937f03c",
-    "zh:cd14357e520e0f09fb25badfb4f2ee37d7741afdc3ed47c7bcf54c1683772543",
-    "zh:e05e025f4bb95138c3c8a75c636e97cd7cfd2fc1525b0c8bd097db8c5f02df6e",
+    "zh:888e14a24410d56b37212fbea373a3e0401d0ff8f8e4f4dd00ba8b29de9fed39",
+    "zh:aa261925e8b152636a0886b3a2149864707632d836e98a88dacea6cfb6302082",
+    "zh:ac10cefb4064b3bb63d4b0379624a416a45acf778eac0004466f726ead686196",
+    "zh:b1a3c8b4d5b2dc9b510eac5e9e02665582862c24eb819ab74f44d3d880246d4f",
+    "zh:c552e2fe5670b6d3ad9a5faf78e3a27197eeedbe2b13928d2c491fa509bc47c7",
   ]
 }

 provider "registry.terraform.io/hashicorp/random" {
-  version = "3.5.1"
+  version = "3.6.0"
   hashes = [
-    "h1:VSnd9ZIPyfKHOObuQCaKfnjIHRtR7qTw19Rz8tJxm+k=",
-    "zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64",
-    "zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d",
-    "zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831",
-    "zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3",
-    "zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f",
+    "h1:R5Ucn26riKIEijcsiOMBR3uOAjuOMfI1x7XvH4P6B1w=",
+    "zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d",
+    "zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211",
+    "zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829",
+    "zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d",
+    "zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b",
-    "zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2",
-    "zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865",
-    "zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03",
-    "zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602",
-    "zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014",
+    "zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17",
+    "zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21",
+    "zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839",
+    "zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0",
+    "zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c",
+    "zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e",
   ]
 }
@ -4,7 +4,7 @@ module "authelia" {
|
|||||||
name = "authelia"
|
name = "authelia"
|
||||||
instance_count = 2
|
instance_count = 2
|
||||||
priority = 70
|
priority = 70
|
||||||
image = "authelia/authelia:4.37"
|
image = "authelia/authelia:4.38"
|
||||||
args = ["--config", "$${NOMAD_TASK_DIR}/authelia.yml"]
|
args = ["--config", "$${NOMAD_TASK_DIR}/authelia.yml"]
|
||||||
ingress = true
|
ingress = true
|
||||||
service_port = 9999
|
service_port = 9999
|
||||||
@ -49,7 +49,7 @@ module "authelia" {
|
|||||||
mount = false
|
mount = false
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
data = "{{ with nomadVar \"nomad/jobs/authelia\" }}{{ .lldap_admin_password }}{{ end }}"
|
data = "{{ with nomadVar \"secrets/ldap\" }}{{ .admin_password }}{{ end }}"
|
||||||
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
dest_prefix = "$${NOMAD_SECRETS_DIR}"
|
||||||
dest = "ldap_password.txt"
|
dest = "ldap_password.txt"
|
||||||
mount = false
|
mount = false
|
||||||
@ -105,6 +105,62 @@ module "authelia" {
|
|||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
resource "nomad_acl_policy" "authelia" {
|
||||||
|
name = "authelia"
|
||||||
|
description = "Give access to shared authelia variables"
|
||||||
|
rules_hcl = <<EOH
|
||||||
|
namespace "default" {
|
||||||
|
variables {
|
||||||
|
path "authelia/*" {
|
||||||
|
capabilities = ["read"]
|
||||||
|
}
|
||||||
|
path "secrets/authelia/*" {
|
||||||
|
capabilities = ["read"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOH
|
||||||
|
|
||||||
|
job_acl {
|
||||||
|
job_id = module.authelia.job_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Give access to ldap secrets
|
||||||
|
resource "nomad_acl_policy" "authelia_ldap_secrets" {
|
||||||
|
name = "authelia-secrets-ldap"
|
||||||
|
description = "Give access to LDAP secrets"
|
||||||
|
rules_hcl = <<EOH
|
||||||
|
namespace "default" {
|
||||||
|
variables {
|
||||||
|
path "secrets/ldap" {
|
||||||
|
capabilities = ["read"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOH
|
||||||
|
|
||||||
|
job_acl {
|
||||||
|
job_id = module.authelia.job_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Enable oidc for nomad clients
|
||||||
|
module "nomad_oidc_client" {
|
||||||
|
source = "./oidc_client"
|
||||||
|
|
||||||
|
name = "nomad"
|
||||||
|
oidc_client_config = {
|
||||||
|
description = "Nomad"
|
||||||
|
authorization_policy = "two_factor"
|
||||||
|
redirect_uris = [
|
||||||
|
"https://nomad.${var.base_hostname}/oidc/callback",
|
||||||
|
"https://nomad.${var.base_hostname}/ui/settings/tokens",
|
||||||
|
]
|
||||||
|
scopes = ["openid", "groups"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
resource "nomad_acl_auth_method" "nomad_authelia" {
|
resource "nomad_acl_auth_method" "nomad_authelia" {
|
||||||
name = "authelia"
|
name = "authelia"
|
||||||
type = "OIDC"
|
type = "OIDC"
|
||||||
@ -114,9 +170,9 @@ resource "nomad_acl_auth_method" "nomad_authelia" {
|
|||||||
|
|
||||||
config {
|
config {
|
||||||
oidc_discovery_url = "https://authelia.${var.base_hostname}"
|
oidc_discovery_url = "https://authelia.${var.base_hostname}"
|
||||||
oidc_client_id = "nomad"
|
oidc_client_id = module.nomad_oidc_client.client_id
|
||||||
oidc_client_secret = yamldecode(file("${path.module}/../ansible_playbooks/vars/nomad_vars.yml"))["nomad/oidc"]["secret"]
|
oidc_client_secret = module.nomad_oidc_client.secret
|
||||||
bound_audiences = ["nomad"]
|
bound_audiences = [module.nomad_oidc_client.client_id]
|
||||||
oidc_scopes = [
|
oidc_scopes = [
|
||||||
"groups",
|
"groups",
|
||||||
"openid",
|
"openid",
|
||||||
@ -134,7 +190,7 @@ resource "nomad_acl_auth_method" "nomad_authelia" {
|
|||||||
resource "nomad_acl_binding_rule" "nomad_authelia_admin" {
|
resource "nomad_acl_binding_rule" "nomad_authelia_admin" {
|
||||||
description = "engineering rule"
|
description = "engineering rule"
|
||||||
auth_method = nomad_acl_auth_method.nomad_authelia.name
|
auth_method = nomad_acl_auth_method.nomad_authelia.name
|
||||||
selector = "\"nomad-deploy\" in list.roles"
|
selector = "\"nomad-admin\" in list.roles"
|
||||||
bind_type = "role"
|
bind_type = "role"
|
||||||
bind_name = "admin" # acls.nomad_acl_role.admin.name
|
bind_name = "admin" # acls.nomad_acl_role.admin.name
|
||||||
}
|
}
|
||||||
|
@@ -89,8 +89,8 @@ authentication_backend:
     groups_filter: (member={dn})

    ## The username and password of the admin user.
-    {{ with nomadVar "nomad/jobs/authelia" }}
-    user: uid={{ .lldap_admin_user }},ou=people,{{ with nomadVar "nomad/jobs" }}{{ .ldap_base_dn }}{{ end }}
+    {{ with nomadVar "secrets/ldap" }}
+    user: uid={{ .admin_user }},ou=people,{{ with nomadVar "nomad/jobs" }}{{ .ldap_base_dn }}{{ end }}
    {{ end }}
    # password set using secrets file
    # password: <secret>
@@ -151,6 +151,22 @@ access_control:
      networks: 192.168.5.0/24

  rules:
+    ## Allow favicons on internal network
+    - domain: '*.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}'
+      resources:
+        - '^/apple-touch-icon-precomposed\.png$'
+        - '^/assets/safari-pinned-tab\.svg$'
+        - '^/apple-touch-icon-180x180\.png$'
+        - '^/apple-touch-icon\.png$'
+        - '^/favicon\.ico$'
+      networks:
+        - internal
+      policy: bypass
+
+    {{ range nomadVarList "authelia/access_control/service_rules" }}{{ with nomadVar .Path }}
+    - domain: '{{ .name }}.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}'
+      {{ .rule.Value | indent 6 }}
+    {{ end }}{{ end }}
    ## Rules applied to everyone
    - domain: '*.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}'
      networks:
@@ -219,7 +235,7 @@ storage:
 ## The available providers are: filesystem, smtp. You must use only one of these providers.
 notifier:
   ## You can disable the notifier startup check by setting this to true.
-  disable_startup_check: false
+  disable_startup_check: true

 {{ with nomadVar "secrets/smtp" }}
 smtp:
@@ -245,4 +261,18 @@ identity_providers:
    # hmac_secret: <file>
    # issuer_private_key: <file>

-    clients: {{ with nomadVar "nomad/jobs/authelia" }}{{ .oidc_clients.Value }}{{ end }}
+    clients:
+      {{ range nomadVarList "authelia/access_control/oidc_clients" -}}
+      {{- $name := (sprig_last (sprig_splitList "/" .Path)) -}}
+      {{ "-" | indent 6 }}
+      {{ with nomadVar .Path }}
+
+      {{- $im := .ItemsMap -}}
+      {{- $im = sprig_set $im "redirect_uris" (.redirect_uris.Value | parseYAML) -}}
+      {{- $im = sprig_set $im "scopes" (.scopes.Value | parseYAML) -}}
+      {{- with nomadVar (printf "secrets/authelia/%s" $name) -}}
+      {{- $im = sprig_set $im "secret" .secret_hash.Value -}}
+      {{- end -}}
+      {{ $im | toYAML | indent 8 }}
+      {{ end }}
+      {{ end }}
|
|||||||
variable "config_data" {
|
|
||||||
type = string
|
|
||||||
description = "Plain text config file for blocky"
|
|
||||||
}
|
|
||||||
|
|
||||||
job "blocky" {
|
job "blocky" {
|
||||||
datacenters = ["dc1"]
|
datacenters = ["dc1"]
|
||||||
type = "system"
|
type = "service"
|
||||||
priority = 100
|
priority = 100
|
||||||
|
|
||||||
|
constraint {
|
||||||
|
distinct_hosts = true
|
||||||
|
}
|
||||||
|
|
||||||
update {
|
update {
|
||||||
max_parallel = 1
|
max_parallel = 1
|
||||||
# TODO: maybe switch to service job from system so we can use canary and autorollback
|
auto_revert = true
|
||||||
# auto_revert = true
|
min_healthy_time = "60s"
|
||||||
|
healthy_deadline = "5m"
|
||||||
}
|
}
|
||||||
|
|
||||||
group "blocky" {
|
group "blocky" {
|
||||||
|
# TODO: This must be updated to match the nubmer of servers (possibly grabbed from TF)
|
||||||
|
# I am moving away from `system` jobs because of https://github.com/hashicorp/nomad/issues/12023
|
||||||
|
count = 2
|
||||||
|
|
||||||
network {
|
network {
|
||||||
mode = "bridge"
|
mode = "bridge"
|
||||||
@ -32,7 +35,9 @@ job "blocky" {
|
|||||||
|
|
||||||
dns {
|
dns {
|
||||||
# Set expclicit DNS servers because tasks, by default, use this task
|
# Set expclicit DNS servers because tasks, by default, use this task
|
||||||
servers = ["1.1.1.1", "1.0.0.1"]
|
servers = [
|
||||||
|
"192.168.2.1",
|
||||||
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -60,6 +65,11 @@ job "blocky" {
|
|||||||
path = "/"
|
path = "/"
|
||||||
interval = "10s"
|
interval = "10s"
|
||||||
timeout = "3s"
|
timeout = "3s"
|
||||||
|
|
||||||
|
check_restart {
|
||||||
|
limit = 3
|
||||||
|
grace = "5m"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -67,19 +77,31 @@ job "blocky" {
|
|||||||
driver = "docker"
|
driver = "docker"
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "ghcr.io/0xerr0r/blocky:v0.22"
|
image = "ghcr.io/0xerr0r/blocky:v0.24"
|
||||||
args = ["-c", "$${NOMAD_TASK_DIR}/config.yml"]
|
args = ["-c", "$${NOMAD_TASK_DIR}/config.yml"]
|
||||||
ports = ["dns", "api"]
|
ports = ["dns", "api"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
action "refresh-lists" {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["lists", "refresh"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "healthcheck" {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["healthcheck"]
|
||||||
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
cpu = 50
|
cpu = 50
|
||||||
memory = 50
|
memory = 75
|
||||||
memory_max = 100
|
memory_max = 150
|
||||||
}
|
}
|
||||||
|
|
||||||
template {
|
template {
|
||||||
data = var.config_data
|
data = <<EOF
|
||||||
|
${file("${module_path}/config.yml")}
|
||||||
|
EOF
|
||||||
destination = "$${NOMAD_TASK_DIR}/config.yml"
|
destination = "$${NOMAD_TASK_DIR}/config.yml"
|
||||||
splay = "1m"
|
splay = "1m"
|
||||||
|
|
||||||
@ -105,6 +127,121 @@ job "blocky" {
|
|||||||
max = "20s"
|
max = "20s"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{ if nomadVarExists "blocky_lists/user" }}
|
||||||
|
{{ with nomadVar "blocky_lists/user" -}}
|
||||||
|
{{ .block_list.Value }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
EOF
|
||||||
|
destination = "$${NOMAD_TASK_DIR}/block"
|
||||||
|
change_mode = "script"
|
||||||
|
|
||||||
|
change_script {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["lists", "refresh"]
|
||||||
|
timeout = "20s"
|
||||||
|
}
|
||||||
|
|
||||||
|
wait {
|
||||||
|
min = "30s"
|
||||||
|
max = "1m"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{ if nomadVarExists "blocky_lists/user" }}
|
||||||
|
{{ with nomadVar "blocky_lists/user" -}}
|
||||||
|
{{ .allow_list.Value }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
EOF
|
||||||
|
destination = "$${NOMAD_TASK_DIR}/allow"
|
||||||
|
change_mode = "script"
|
||||||
|
|
||||||
|
change_script {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["lists", "refresh"]
|
||||||
|
timeout = "20s"
|
||||||
|
}
|
||||||
|
|
||||||
|
wait {
|
||||||
|
min = "30s"
|
||||||
|
max = "1m"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{ if nomadVarExists "blocky_lists/terraform" }}
|
||||||
|
{{ with nomadVar "blocky_lists/terraform" -}}
|
||||||
|
{{ .smarttv_regex.Value }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
EOF
|
||||||
|
destination = "$${NOMAD_TASK_DIR}/smarttv-regex.txt"
|
||||||
|
change_mode = "script"
|
||||||
|
|
||||||
|
change_script {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["lists", "refresh"]
|
||||||
|
timeout = "20s"
|
||||||
|
}
|
||||||
|
|
||||||
|
wait {
|
||||||
|
min = "10s"
|
||||||
|
max = "20s"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{ if nomadVarExists "blocky_lists/terraform" }}
|
||||||
|
{{ with nomadVar "blocky_lists/terraform" -}}
|
||||||
|
{{ .wemo.Value }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
EOF
|
||||||
|
destination = "$${NOMAD_TASK_DIR}/wemo.txt"
|
||||||
|
change_mode = "script"
|
||||||
|
|
||||||
|
change_script {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["lists", "refresh"]
|
||||||
|
timeout = "20s"
|
||||||
|
}
|
||||||
|
|
||||||
|
wait {
|
||||||
|
min = "10s"
|
||||||
|
max = "20s"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{ if nomadVarExists "blocky_lists/terraform" }}
|
||||||
|
{{ with nomadVar "blocky_lists/terraform" -}}
|
||||||
|
{{ .sonos.Value }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
EOF
|
||||||
|
destination = "$${NOMAD_TASK_DIR}/sonos.txt"
|
||||||
|
change_mode = "script"
|
||||||
|
|
||||||
|
change_script {
|
||||||
|
command = "/app/blocky"
|
||||||
|
args = ["lists", "refresh"]
|
||||||
|
timeout = "20s"
|
||||||
|
}
|
||||||
|
|
||||||
|
wait {
|
||||||
|
min = "10s"
|
||||||
|
max = "20s"
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task "stunnel" {
|
task "stunnel" {
|
||||||
@ -116,9 +253,9 @@ job "blocky" {
|
|||||||
}
|
}
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "alpine:3.17"
|
image = "iamthefij/stunnel:1.0.0"
|
||||||
|
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||||
ports = ["tls"]
|
ports = ["tls"]
|
||||||
args = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
@ -126,36 +263,34 @@ job "blocky" {
|
|||||||
memory = 100
|
memory = 100
|
||||||
}
|
}
|
||||||
|
|
||||||
template {
|
|
||||||
data = <<EOF
|
|
||||||
set -e
|
|
||||||
apk add stunnel
|
|
||||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
|
||||||
EOF
|
|
||||||
destination = "$${NOMAD_TASK_DIR}/start.sh"
|
|
||||||
}
|
|
||||||
|
|
||||||
template {
|
template {
|
||||||
data = <<EOF
|
data = <<EOF
|
||||||
syslog = no
|
syslog = no
|
||||||
foreground = yes
|
foreground = yes
|
||||||
delay = yes
|
delay = yes
|
||||||
|
|
||||||
|
[dns_server]
|
||||||
|
# Dummy server to keep stunnel running if no mysql is present
|
||||||
|
accept = 8053
|
||||||
|
connect = 127.0.0.1:53
|
||||||
|
ciphers = PSK
|
||||||
|
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||||
|
|
||||||
|
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
|
||||||
[mysql_client]
|
[mysql_client]
|
||||||
client = yes
|
client = yes
|
||||||
accept = 127.0.0.1:3306
|
accept = 127.0.0.1:3306
|
||||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
|
|
||||||
connect = {{ .Address }}:{{ .Port }}
|
connect = {{ .Address }}:{{ .Port }}
|
||||||
{{- end }}
|
|
||||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-blocky" -}}
|
||||||
[redis_client]
|
[redis_client]
|
||||||
client = yes
|
client = yes
|
||||||
accept = 127.0.0.1:6379
|
accept = 127.0.0.1:6379
|
||||||
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-blocky" -}}
|
|
||||||
connect = {{ .Address }}:{{ .Port }}
|
connect = {{ .Address }}:{{ .Port }}
|
||||||
{{- end }}
|
|
||||||
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
|
||||||
|
{{- end }}
|
||||||
EOF
|
EOF
|
||||||
destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
|
destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
|
||||||
}
|
}
|
||||||
@ -186,11 +321,9 @@ EOF
|
|||||||
config {
|
config {
|
||||||
image = "mariadb:10"
|
image = "mariadb:10"
|
||||||
args = [
|
args = [
|
||||||
"/usr/bin/timeout",
|
|
||||||
"2m",
|
|
||||||
"/bin/bash",
|
"/bin/bash",
|
||||||
"-c",
|
"-c",
|
||||||
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
|
"/usr/bin/timeout 2m /bin/bash -c \"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do echo 'Retry in 10s'; sleep 10; done\" || true",
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1,16 +1,7 @@
-locals {
-  config_data = file("${path.module}/config.yml")
-}
-
 resource "nomad_job" "blocky" {
-  hcl2 {
-    vars = {
-      "config_data" = local.config_data,
-    }
-  }
-
   jobspec = templatefile("${path.module}/blocky.nomad", {
     use_wesher  = var.use_wesher,
+    module_path = path.module,
   })
 }

@@ -66,3 +57,32 @@ EOH
     task  = "stunnel"
   }
 }
+
+resource "nomad_variable" "blocky_lists_terraform" {
+  path = "blocky_lists/terraform"
+  items = {
+    smarttv_regex = file("${path.module}/list-smarttv-regex.txt")
+    wemo          = file("${path.module}/list-wemo.txt")
+    sonos         = file("${path.module}/list-sonos.txt")
+  }
+}
+
+resource "nomad_acl_policy" "blocky_lists" {
+  name        = "blocky-lists"
+  description = "Give access to Blocky lists"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "blocky_lists/*" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+
+  job_acl {
+    job_id = "blocky"
+    group  = "blocky"
+    task   = "blocky"
+  }
+}
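Alongside the `blocky_lists/terraform` variable created above, the job's new `block` and `allow` templates also read a `blocky_lists/user` variable that nothing in this change creates; the `nomadVarExists` guard makes it optional. A sketch of what defining it could look like, assuming newline-separated domain lists (the paths come from the templates, the values are illustrative):

# Sketch only: optional user-managed lists consumed by the job's
# block/allow templates; readable under the "blocky_lists/*" grant above.
resource "nomad_variable" "blocky_lists_user" {
  path = "blocky_lists/user"
  items = {
    block_list = <<-EOF
      ads.example.com
      tracker.example.net
    EOF
    allow_list = <<-EOF
      allowed.example.com
    EOF
  }
}

Because those templates use `change_mode = "script"`, updating this variable triggers `blocky lists refresh` inside the running task rather than a restart.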
@@ -2,28 +2,53 @@ ports:
   dns: 53
   http: 4000

+# I must have ip v6 blocked or something
+connectIPVersion: v4
+
 bootstrapDns:
   - upstream: 1.1.1.1
   - upstream: 1.0.0.1
+  - upstream: 9.9.9.9
+  - upstream: 149.112.112.112

-upstream:
-  default:
-    - 1.1.1.1
-    - 1.0.0.1
-  quad9:
-    - 9.9.9.9
-    - 149.112.112.112
-    - 2620:fe::fe
-    - 2620:fe::9
-    - https://dns.quad9.net/dns-query
-    - tcp-tls:dns.quad9.net
-  quad9-unsecured:
-    - 9.9.9.10
-    - 149.112.112.10
-    - 2620:fe::10
-    - 2620:fe::fe:10
-    - https://dns10.quad9.net/dns-query
-    - tcp-tls:dns10.quad9.net
+upstreams:
+  init:
+    strategy: fast
+  groups:
+    default:
+      - https://dns.quad9.net/dns-query
+      - tcp-tls:dns.quad9.net
+      - https://one.one.one.one/dns-query
+      - tcp-tls:one.one.one.one
+    # cloudflare:
+    #   - 1.1.1.1
+    #   - 1.0.0.1
+    #   - 2606:4700:4700::1111
+    #   - 2606:4700:4700::1001
+    #   - https://one.one.one.one/dns-query
+    #   - tcp-tls:one.one.one.one
+    # quad9:
+    #   - 9.9.9.9
+    #   - 149.112.112.112
+    #   - 2620:fe::fe
+    #   - 2620:fe::9
+    #   - https://dns.quad9.net/dns-query
+    #   - tcp-tls:dns.quad9.net
+    # quad9-secured:
+    #   - 9.9.9.11
+    #   - 149.112.112.11
+    #   - 2620:fe::11
+    #   - 2620:fe::fe:11
+    #   - https://dns11.quad9.net/dns-query
+    #   - tcp-tls:dns11.quad9.net
+    # quad9-unsecured:
+    #   - 9.9.9.10
+    #   - 149.112.112.10
+    #   - 2620:fe::10
+    #   - 2620:fe::fe:10
+    #   - https://dns10.quad9.net/dns-query
+    #   - tcp-tls:dns10.quad9.net

 conditional:
   fallbackUpstream: false
@@ -36,9 +61,11 @@ conditional:
     .: 192.168.2.1

 hostsFile:
-  filePath: {{ env "NOMAD_TASK_DIR" }}/nomad.hosts
+  sources:
+    - {{ env "NOMAD_TASK_DIR" }}/nomad.hosts
   hostsTTL: 30s
-  refreshPeriod: 30s
+  loading:
+    refreshPeriod: 30s

 clientLookup:
   upstream: 192.168.2.1
@@ -50,22 +77,12 @@ blocking:
       - http://sysctl.org/cameleon/hosts
      - https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt
      - https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt
-      - https://hosts-file.net/ad_servers.txt
-    smarttv:
-      - https://perflyst.github.io/PiHoleBlocklist/SmartTV.txt
-      - https://perflyst.github.io/PiHoleBlocklist/regex.list
-    wemo:
-      - |
-        # Remote commands
-        api.xbcs.net
-        # Firmware updates
-        fw.xbcs.net
-        # TURN service
-        nat.wemo2.com
-        # Connectivity checks
-        heartbeat.xwemo.com
-    malware:
-      - https://mirror1.malwaredomains.com/files/justdomains
+      # - https://hosts-file.net/ad_servers.txt
+    iot:
+      - https://raw.githubusercontent.com/Perflyst/PiHoleBlocklist/master/SmartTV.txt
+      - {{ env "NOMAD_TASK_DIR" }}/smarttv-regex.txt
+      - {{ env "NOMAD_TASK_DIR" }}/wemo.txt
+      - {{ env "NOMAD_TASK_DIR" }}/sonos.txt
     antisocial:
       - |
         facebook.com
@@ -73,20 +90,21 @@ blocking:
         reddit.com
         twitter.com
         youtube.com
+    custom:
+      - {{ env "NOMAD_TASK_DIR" }}/block

   whiteLists:
-    # Move to Gitea when deployed internally
-    ads:
-{{ with nomadVar "nomad/jobs/blocky" -}}
-{{ .whitelists_ads.Value | indent 6 }}
-{{- end }}
+    custom:
+      - {{ env "NOMAD_TASK_DIR" }}/allow

   clientGroupsBlock:
     default:
       - ads
-      - malware
-      - smarttv
-      - wemo
+      - custom
+    192.168.3.1/24:
+      - ads
+      - iot
+      - custom

 customDNS:
   customTTL: 1h
@@ -105,7 +123,7 @@ customDNS:
 prometheus:
   enable: true

-{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-tls" -}}
+{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "redis-blocky" -}}
 redis:
   address: 127.0.0.1:6379
   # password: ""
@@ -114,7 +132,6 @@ redis:
   connectionCooldown: 3s
 {{ end -}}

-
 {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-tls" -}}
 {{ with nomadVar "nomad/jobs/blocky" -}}
 queryLog:
core/blocky/list-smarttv-regex.txt (new file, 13 lines)
@@ -0,0 +1,13 @@
+# From: https://perflyst.github.io/PiHoleBlocklist/regex.list
+# Title: Perflyst's SmartTV Blocklist for Pi-hole - RegEx extension
+# Version: 13July2023v1
+# Samsung
+/(^|\.)giraffic\.com$/
+/(^|\.)internetat\.tv$/
+/(^|\.)pavv\.co\.kr$/
+/(^|\.)samsungcloudsolution\.net$/
+/(^|\.)samsungelectronics\.com$/
+/(^|\.)samsungrm\.net$/
+# /(^|\.)samsungotn\.net$/ # prevents updates
+# /(^|\.)samsungcloudcdn\.com$/ # prevents updates
+# /(^|\.)samsungcloudsolution\.com$/ # prevents internet connection

core/blocky/list-sonos.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
+# Block Sonos devices from phoning home and allowing remote access
+/(^|\.)sonos\.com$/

core/blocky/list-wemo.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
+# Remote commands
+api.xbcs.net
+# Firmware updates
+fw.xbcs.net
+# TURN service
+nat.wemo2.com
+# Connectivity checks
+heartbeat.xwemo.com
@@ -1,8 +1,16 @@
 job "exporters" {
   datacenters = ["dc1"]
-  type        = "system"
+  type        = "service"
+  priority    = 55
+
+  constraint {
+    distinct_hosts = true
+  }

   group "promtail" {
+    # TODO: This must be updated to match the number of servers (possibly grabbed from TF)
+    # I am moving away from `system` jobs because of https://github.com/hashicorp/nomad/issues/1202
+    count = 2

     network {
       mode = "bridge"
@@ -33,14 +41,8 @@ job "exporters" {
     task "promtail" {
       driver = "docker"

-      meta = {
-        "diun.sort_tags"    = "semver"
-        "diun.watch_repo"   = true
-        "diun.include_tags" = "^[0-9]+\\.[0-9]+\\.[0-9]+$"
-      }
-
       config {
-        image = "grafana/promtail:2.9.1"
+        image = "grafana/promtail:3.3.0"
         args  = ["-config.file=$${NOMAD_TASK_DIR}/promtail.yml"]
         ports = ["promtail"]

core/exporters.tf (new file, 5 lines)
@@ -0,0 +1,5 @@
+resource "nomad_job" "exporters" {
+  jobspec = templatefile("${path.module}/exporters.nomad", {
+    use_wesher = var.use_wesher,
+  })
+}
@@ -28,7 +28,6 @@ job "grafana" {
       tags = [
         "traefik.enable=true",
         "traefik.http.routers.grafana.entryPoints=websecure",
-        # "traefik.http.routers.grafana.middlewares=authelia@nomad",
       ]
     }

@@ -41,8 +40,8 @@ job "grafana" {
       }

       config {
-        image = "alpine:3.17"
-        args  = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
+        image = "iamthefij/stunnel:1.0.0"
+        args  = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
       }

       resources {
@@ -50,15 +49,6 @@ job "grafana" {
         memory = 100
       }

-      template {
-        data        = <<EOF
-set -e
-apk add stunnel
-exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
-EOF
-        destination = "$${NOMAD_TASK_DIR}/start.sh"
-      }
-
       template {
         data = <<EOF
 syslog = no
@@ -96,10 +86,10 @@ EOF
         image = "mariadb:10"
         args = [
           "/usr/bin/timeout",
-          "2m",
+          "20m",
           "/bin/bash",
           "-c",
-          "until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
+          "until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do echo 'Retry in 10s'; sleep 10; done",
         ]
       }

@@ -143,14 +133,15 @@ SELECT 'NOOP';
       driver = "docker"

       config {
-        image = "grafana/grafana:9.4.2"
+        image = "grafana/grafana:10.0.10"
         args  = ["--config", "$${NOMAD_ALLOC_DIR}/config/grafana.ini"]
         ports = ["web"]
       }

       env = {
         "GF_INSTALL_PLUGINS"    = "grafana-clock-panel,grafana-piechart-panel,grafana-polystat-panel,natel-discrete-panel",
-        "GF_PATHS_PROVISIONING" = "$${NOMAD_ALLOC_DIR}/config/provisioning"
+        "GF_PATHS_CONFIG"       = "$${NOMAD_ALLOC_DIR}/config/grafana.ini",
+        "GF_PATHS_PROVISIONING" = "$${NOMAD_ALLOC_DIR}/config/provisioning",
       }

       template {
@@ -164,7 +155,6 @@ GF_SECURITY_ADMIN_PASSWORD={{ .admin_pw }}
 GF_EXTERNAL_IMAGE_STORAGE_S3_ACCESS_KEY={{ .minio_access_key }}
 GF_EXTERNAL_IMAGE_STORAGE_S3_SECRET_KEY={{ .minio_secret_key }}
 GRAFANA_ALERT_EMAIL_ADDRESSES={{ .alert_email_addresses }}
-GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET={{ .oidc_secret }}
 {{ if .db_name -}}
 # Database storage
 GF_DATABASE_TYPE=mysql
@@ -176,6 +166,10 @@ GF_DATABASE_PASSWORD={{ .db_pass }}
 SLACK_BOT_URL={{ .slack_bot_url }}
 SLACK_BOT_TOKEN={{ .slack_bot_token }}
 SLACK_HOOK_URL={{ .slack_hook_url }}
+{{ end -}}
+{{ with nomadVar "secrets/authelia/grafana" -}}
+GF_AUTH_GENERIC_OAUTH_CLIENT_ID={{ .client_id }}
+GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET={{ .secret }}
 {{ end -}}
 EOF
         env = true
@@ -202,8 +196,12 @@ SLACK_HOOK_URL={{ .slack_hook_url }}
       }

       resources {
-        cpu    = 100
-        memory = 100
+        cpu    = 50
+        memory = 50
+      }
+
+      action "reloadnow" {
+        command = "/local/reload_config.sh"
       }

       env = {
@@ -265,7 +263,7 @@ ${file(join("/", [module_path, "grafana", config_file]))}
         # Set owner to grafana uid
         # uid = 472
         # Change template delimiter for dashboard files that use json and have double curly braces and square braces
-        %{ if length(regexall("dashboard", config_file)) > 0 ~}
+        %{ if endswith(config_file, ".json") ~}
         left_delimiter  = "<<<<"
         right_delimiter = ">>>>"
         %{ endif }
@@ -281,6 +279,11 @@ ${file(join("/", [module_path, "grafana", config_file]))}
     task "grafana-image-renderer" {
       driver = "docker"

+      constraint {
+        attribute = "$${attr.cpu.arch}"
+        value     = "amd64"
+      }
+
       config {
         image = "grafana/grafana-image-renderer:3.6.1"
         ports = ["renderer"]

@@ -1,17 +1,3 @@
-resource "nomad_job" "exporters" {
-  jobspec = templatefile("${path.module}/exporters.nomad", {
-    use_wesher = var.use_wesher,
-  })
-}
-
-resource "nomad_job" "prometheus" {
-  jobspec = templatefile("${path.module}/prometheus.nomad", {
-    use_wesher = var.use_wesher,
-  })
-
-  detach = false
-}
-
 resource "nomad_job" "grafana" {
   jobspec = templatefile("${path.module}/grafana.nomad", {
     module_path = path.module
@@ -93,3 +79,39 @@ EOH
     task  = "stunnel"
   }
 }
+
+module "grafana_oidc" {
+  source = "./oidc_client"
+
+  name = "grafana"
+  oidc_client_config = {
+    description = "Grafana"
+    scopes = [
+      "openid",
+      "groups",
+      "email",
+      "profile",
+    ]
+    redirect_uris = [
+      "https://grafana.thefij.rocks/login/generic_oauth",
+    ]
+  }
+
+  job_acl = {
+    job_id = "grafana"
+    group  = "grafana"
+    task   = "grafana"
+  }
+}
+
+# resource "nomad_variable" "grafana_config" {
+#   for_each = fileset("${path.module}/grafana", "**")
+#
+#   path = "nomad/jobs/grafana/${replace(each.key, ".", "_")}"
+#   items = {
+#     path            = "${each.key}"
+#     value           = file("${path.module}/grafana/${each.key}")
+#     left_delimiter  = endswith(each.key, ".json") ? "<<<<" : "{{"
+#     right_delimiter = endswith(each.key, ".json") ? ">>>>" : "}}"
+#   }
+# }
@@ -20,8 +20,8 @@ data = /var/lib/grafana
 # Directory where grafana will automatically scan and look for plugins
 ;plugins = /var/lib/grafana/plugins

-# folder that contains provisioning config files that grafana will apply on startup and while running.
-; provisioning = /etc/grafana/provisioning
+# folder that contains PROVISIONING config files that grafana will apply on startup and while running.
+provisioning = from_env

 #################################### Server ####################################
 [server]
@@ -261,7 +261,7 @@ log_queries =
 enabled = true
 name = Authelia
 ;allow_sign_up = true
-client_id = grafana
+client_id = from_env
 client_secret = from_env
 scopes = openid profile email groups
 auth_url = https://authelia.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}/api/oidc/authorization
@@ -270,6 +270,10 @@ api_url = https://authelia.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{
 login_attribute_path = preferred_username
 groups_attribute_path = groups
 name_attribute_path = name
+# Role attribute path is not working
+role_attribute_path = contains(groups[*], 'admin') && 'Admin' || contains(groups[*], 'grafana-admin') && 'Admin' || contains(groups[*], 'grafana-editor') && 'Editor' || contains(groups[*], 'developer') && 'Editor'
+allow_assign_grafana_admin = true
+skip_org_role_sync = true
 use_pkce = true

 ;team_ids =
@@ -104,7 +104,7 @@
         "uid": "Prometheus"
       },
       "exemplar": false,
-      "expr": "sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
+      "expr": "sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
       "format": "table",
       "instant": true,
       "interval": "",
@@ -458,7 +458,7 @@
         "uid": "Prometheus"
       },
       "exemplar": true,
-      "expr": "sum(blocky_blacklist_cache) / sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
+      "expr": "sum(blocky_blacklist_cache) / sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
       "format": "table",
       "instant": false,
       "interval": "",
@@ -533,7 +533,7 @@
         "uid": "Prometheus"
       },
       "exemplar": true,
-      "expr": "sum(go_memstats_sys_bytes{job=\"exporters\", consul_service=\"blocky-api\"})/sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
+      "expr": "sum(go_memstats_sys_bytes{job=\"exporters\", consul_service=\"blocky-api\"})/sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
      "format": "table",
       "instant": false,
       "interval": "",
@@ -753,7 +753,7 @@
         "uid": "Prometheus"
       },
       "exemplar": true,
-      "expr": "sum(blocky_cache_entry_count)/ sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
+      "expr": "sum(blocky_cache_entry_count)/ sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
       "format": "table",
       "instant": false,
       "interval": "",
@@ -1162,7 +1162,7 @@
         "uid": "Prometheus"
       },
       "exemplar": false,
-      "expr": "sum(time() -blocky_last_list_group_refresh)/ sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
+      "expr": "sum(time() -blocky_last_list_group_refresh)/ sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
       "format": "table",
       "instant": true,
       "interval": "",
@@ -1224,7 +1224,7 @@
         "uid": "Prometheus"
       },
       "exemplar": true,
-      "expr": "sum(blocky_prefetch_domain_name_cache_count)/ sum(up{job=\"exporters\", consul_service=\"blocky-api\"})",
+      "expr": "sum(blocky_prefetch_domain_name_cache_count)/ sum(nomad_client_allocs_running{exported_job=\"blocky\", task=\"blocky\"})",
       "format": "table",
       "interval": "",
       "legendFormat": "",
@@ -1,783 +0,0 @@
-[Deleted file: a Grafana dashboard JSON export titled "Traefik" (uid "qPdAviJmz", gnetId 4475, schemaVersion 27, exported from Grafana 7.5.5). The 783 removed lines defined the community Traefik/Prometheus dashboard: "$service return code" and "Requests by service"/"Requests by protocol" pie charts, a "$service response time" singlestat, request-rate graphs for status code 200 and other status codes over 5 minutes, and "service"/"entrypoint" template variables driven by Prometheus label_values() queries.]
@@ -5,4 +5,4 @@ providers:
     type: file
     disableDeletion: false
     options:
-      path: /etc/grafana/provisioning/dashboards/default
+      path: {{ env "NOMAD_ALLOC_DIR" }}/config/provisioning/dashboards/default
core/grafana/provisioning/datasources/influxdb.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
+---
+apiVersion: 1
+
+datasources:
+  - name: HASS Metrics
+    url: "http://192.168.2.75:8086"
+    type: influxdb
+    access: proxy
+    database: hass
+    jsonData:
+      dbName: hass
+
+  - name: Proxmox Metrics
+    url: "http://192.168.2.75:8086"
+    type: influxdb
+    access: proxy
+    database: proxmox
+    jsonData:
+      dbName: proxmox
core/lego.nomad (new file, 96 lines)
@@ -0,0 +1,96 @@
+variable "lego_version" {
+  default = "4.14.2"
+  type    = string
+}
+
+variable "nomad_var_dirsync_version" {
+  default = "0.0.2"
+  type    = string
+}
+
+job "lego" {
+  type = "batch"
+
+  periodic {
+    cron             = "@weekly"
+    prohibit_overlap = true
+  }
+
+  group "main" {
+
+    network {
+      dns {
+        servers = ["1.1.1.1", "1.0.0.1"]
+      }
+    }
+
+    task "main" {
+      driver = "exec"
+
+      config {
+        command = "/bin/bash"
+        args    = ["${NOMAD_TASK_DIR}/start.sh"]
+      }
+
+      artifact {
+        source = "https://github.com/go-acme/lego/releases/download/v${var.lego_version}/lego_v${var.lego_version}_linux_${attr.cpu.arch}.tar.gz"
+      }
+
+      artifact {
+        source = "https://git.iamthefij.com/iamthefij/nomad-var-dirsync/releases/download/v${var.nomad_var_dirsync_version}/nomad-var-dirsync-linux-${attr.cpu.arch}.tar.gz"
+      }
+
+      template {
+        data        = <<EOH
+#! /bin/sh
+set -ex
+
+cd ${NOMAD_TASK_DIR}
+
+echo "Read certs from nomad vars"
+${NOMAD_TASK_DIR}/nomad-var-dirsync-linux-{{ env "attr.cpu.arch" }} -root-var=secrets/certs read .
+
+action=run
+if [ -f /.lego/certificates/_.thefij.rocks.crt ]; then
+  action=renew
+fi
+
+echo "Attempt to $action certificates"
+${NOMAD_TASK_DIR}/lego \
+  --accept-tos --pem \
+  --email=iamthefij@gmail.com \
+  --domains="*.thefij.rocks" \
+  --dns="cloudflare" \
+  $action \
+  --$action-hook="${NOMAD_TASK_DIR}/nomad-var-dirsync-linux-{{ env "attr.cpu.arch" }} -root-var=secrets/certs write .lego" \
+EOH
+        destination = "${NOMAD_TASK_DIR}/start.sh"
+      }
+
+      template {
+        data        = <<EOH
+{{ with nomadVar "nomad/jobs/lego" -}}
+CF_DNS_API_TOKEN={{ .domain_lego_dns }}
+CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
+{{- end }}
+EOH
+        destination = "secrets/cloudflare.env"
+        env         = true
+      }
+
+      env = {
+        NOMAD_ADDR = "unix:///secrets/api.sock"
+      }
+
+      identity {
+        env = true
+      }
+
+      resources {
+        cpu    = 50
+        memory = 100
+      }
+    }
+  }
+}
core/lego.tf (new file, 23 lines)
@@ -0,0 +1,23 @@
+resource "nomad_job" "lego" {
+  jobspec = file("${path.module}/lego.nomad")
+}
+
+resource "nomad_acl_policy" "secrets_certs_write" {
+  name        = "secrets-certs-write"
+  description = "Write certs to secrets store"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "secrets/certs/*" {
+      capabilities = ["write", "read"]
+    }
+    path "secrets/certs" {
+      capabilities = ["write", "read"]
+    }
+  }
+}
+EOH
+  job_acl {
+    job_id = "lego/*"
+  }
+}
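The lego job publishes certificates into Nomad variables under `secrets/certs` via nomad-var-dirsync, so consuming jobs would read them back out of variables rather than from disk. A sketch of how a consumer's template might do that (the exact variable path and item name that dirsync produces for a given file are assumptions here, not taken from this change):

# Sketch only: rendering a synced cert out of the secrets/certs variable
# tree. The variable path and the .cert_pem item name are illustrative;
# they depend on how nomad-var-dirsync maps file paths to variables.
template {
  data        = <<EOF
{{ with nomadVar "secrets/certs/_lego/certificates" }}{{ .cert_pem }}{{ end }}
EOF
  destination = "secrets/cert.pem"
  change_mode = "restart"
}

A consumer would also need its own `nomad_acl_policy` granting `read` on `secrets/certs/*` scoped to its job, mirroring the write policy above.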
@@ -3,31 +3,27 @@ auth_enabled: false
 server:
   http_listen_port: 3100

-ingester:
-  lifecycler:
-    address: 127.0.0.1
-    ring:
-      kvstore:
-        store: inmemory
-      replication_factor: 1
-    final_sleep: 0s
-  chunk_idle_period: 5m
-  chunk_retain_period: 30s
-  max_transfer_retries: 0
+common:
+  ring:
+    instance_addr: 127.0.0.1
+    kvstore:
+      store: inmemory
+  replication_factor: 1
+  path_prefix: /tmp/loki

 schema_config:
   configs:
-    - from: 2018-04-15
-      store: boltdb
+    - from: 2020-05-15
+      store: boltdb-shipper
       object_store: filesystem
       schema: v11
       index:
         prefix: index_
-        period: 168h
+        period: 24h

 storage_config:
-  boltdb:
-    directory: {{ env "NOMAD_TASK_DIR" }}/index
+  boltdb_shipper:
+    active_index_directory: {{ env "NOMAD_TASK_DIR" }}/index

   filesystem:
     directory: {{ env "NOMAD_TASK_DIR" }}/chunks
@@ -38,8 +34,8 @@ limits_config:
   reject_old_samples_max_age: 168h

 chunk_store_config:
-  max_look_back_period: 0s
+  max_look_back_period: 168h

 table_manager:
-  retention_deletes_enabled: false
-  retention_period: 0s
+  retention_deletes_enabled: true
+  retention_period: 168h
@@ -3,15 +3,17 @@ module "loki" {
   detach = false

   name  = "loki"
-  image = "grafana/loki:2.2.1"
+  image = "grafana/loki:2.8.7"
   args  = ["--config.file=$${NOMAD_TASK_DIR}/loki-config.yml"]

   service_port = 3100
   ingress      = true
   use_wesher   = var.use_wesher
+  service_check = {
+    path = "/ready"
+  }

   sticky_disk = true
-  # healthcheck = "/ready"
   templates = [
     {
       data = file("${path.module}/loki-config.yml")
@@ -24,7 +24,8 @@ job "nomad-client-stalker" {

     resources {
       cpu        = 10
-      memory     = 10
+      memory     = 15
+      memory_max = 30
     }
   }
 }
core/oidc_client/.terraform.lock.hcl (new file, generated, 40 lines)
@@ -0,0 +1,40 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/nomad" {
+  version = "2.3.1"
+  hashes = [
+    "h1:lMueBNB2GJ/a5rweL9NPybwVfDH/Q1s+rQvt5Y+kuYs=",
+    "zh:1e7893a3fbebff171bcc5581b70a16eea33193c7e9dd73402ba5c04b7202f0bb",
+    "zh:252cfd3fee4811c83bc74406ba1bc1bbb83d6de20e50a86f93737f8f86864171",
+    "zh:387a7140be6dfa3f8d27f09d1eb2b9f3b84900328fe5a0478e9b3bd91a845808",
+    "zh:49848fa491ac26b0568b112a57d14cc49772607c7cf405e2f74dd537407214b1",
+    "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+    "zh:7b9f345f5bb5f17c5d0bc3d373c25828934a3cbcdb331e0eab54eb47f1355fb2",
+    "zh:8e276f4de508a86e725fffc02ee891db73397c35dbd591d8918af427eeec93a1",
+    "zh:90b349933d2fd28f822a36128be4625bb816aa9f20ec314c79c77306f632ae87",
+    "zh:a0ca6fd6cd94a52684e432104d3dc170a74075f47d9d4ba725cc340a438ed75a",
+    "zh:a6cffc45535a0ff8206782538b3eeaef17dc93d0e1fd58bc1e6f7d5aa0f6ba1a",
+    "zh:c010807b5d3e03d769419787b0e5d4efa6963134e1873a413102af6bf3dd1c49",
+    "zh:faf962ee1981e897e99f7e528642c7e74beed37afd8eaf743e6ede24df812d80",
+  ]
+}
+
+provider "registry.terraform.io/hashicorp/random" {
+  version = "3.6.2"
+  hashes = [
+    "h1:wmG0QFjQ2OfyPy6BB7mQ57WtoZZGGV07uAPQeDmIrAE=",
+    "zh:0ef01a4f81147b32c1bea3429974d4d104bbc4be2ba3cfa667031a8183ef88ec",
+    "zh:1bcd2d8161e89e39886119965ef0f37fcce2da9c1aca34263dd3002ba05fcb53",
+    "zh:37c75d15e9514556a5f4ed02e1548aaa95c0ecd6ff9af1119ac905144c70c114",
+    "zh:4210550a767226976bc7e57d988b9ce48f4411fa8a60cd74a6b246baf7589dad",
+    "zh:562007382520cd4baa7320f35e1370ffe84e46ed4e2071fdc7e4b1a9b1f8ae9b",
+    "zh:5efb9da90f665e43f22c2e13e0ce48e86cae2d960aaf1abf721b497f32025916",
+    "zh:6f71257a6b1218d02a573fc9bff0657410404fb2ef23bc66ae8cd968f98d5ff6",
+    "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+    "zh:9647e18f221380a85f2f0ab387c68fdafd58af6193a932417299cdcae4710150",
+    "zh:bb6297ce412c3c2fa9fec726114e5e0508dd2638cad6a0cb433194930c97a544",
+    "zh:f83e925ed73ff8a5ef6e3608ad9225baa5376446349572c2449c0c0b3cf184b7",
+    "zh:fbef0781cb64de76b1df1ca11078aecba7800d82fd4a956302734999cfd9a4af",
+  ]
+}
core/oidc_client/main.tf (new file)
@@ -0,0 +1,50 @@
+resource "random_password" "oidc_client_id" {
+  length           = 72
+  override_special = "-._~"
+}
+
+resource "random_password" "oidc_secret" {
+  length           = 72
+  override_special = "-._~"
+}
+
+resource "nomad_variable" "authelia_oidc_secret" {
+  path = "secrets/authelia/${var.name}"
+  items = {
+    client_id   = resource.random_password.oidc_client_id.result
+    secret      = resource.random_password.oidc_secret.result
+    secret_hash = resource.random_password.oidc_secret.bcrypt_hash
+  }
+}
+
+resource "nomad_variable" "authelia_access_control_oidc" {
+  path = "authelia/access_control/oidc_clients/${var.name}"
+  items = {
+    id                   = resource.random_password.oidc_client_id.result
+    description          = var.oidc_client_config.description
+    authorization_policy = var.oidc_client_config.authorization_policy
+    redirect_uris        = yamlencode(var.oidc_client_config.redirect_uris)
+    scopes               = yamlencode(var.oidc_client_config.scopes)
+  }
+}
+
+resource "nomad_acl_policy" "oidc_authelia" {
+  count       = var.job_acl != null ? 1 : 0
+  name        = "${var.name}-authelia"
+  description = "Give access to shared authelia variables"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "secrets/authelia/${var.name}" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+
+  job_acl {
+    job_id = var.job_acl.job_id
+    group  = var.job_acl.group
+    task   = var.job_acl.task
+  }
+}
core/oidc_client/output.tf (new file)
@@ -0,0 +1,11 @@
+output "client_id" {
+  value = resource.random_password.oidc_client_id.result
+}
+
+output "secret" {
+  value = resource.random_password.oidc_secret.result
+}
+
+output "secret_hash" {
+  value = resource.random_password.oidc_secret.bcrypt_hash
+}
core/oidc_client/vars.tf (new file)
@@ -0,0 +1,25 @@
+variable "name" {
+  description = "Name of service"
+  type        = string
+}
+
+variable "oidc_client_config" {
+  description = "Authelia oidc client configuration to enable oidc authentication"
+  type = object({
+    description          = string
+    authorization_policy = optional(string, "one_factor")
+    redirect_uris        = list(string)
+    scopes               = list(string)
+  })
+}
+
+variable "job_acl" {
+  description = "Job ACL that should be given to the secrets"
+  type = object({
+    job_id = string
+    group  = optional(string)
+    task   = optional(string)
+  })
+
+  default = null
+}
@@ -37,12 +37,36 @@ job "prometheus" {
         "traefik.enable=true",
         "traefik.http.routers.prometheus.entryPoints=websecure",
       ]
+
+      check {
+        type     = "http"
+        path     = "/-/healthy"
+        interval = "10s"
+        timeout  = "3s"
+
+        check_restart {
+          limit = 3
+          grace = "5m"
+        }
+      }
     }
 
     service {
       name     = "pushgateway"
       provider = "nomad"
       port     = "pushgateway"
+
+      check {
+        type     = "http"
+        path     = "/-/healthy"
+        interval = "10s"
+        timeout  = "3s"
+
+        check_restart {
+          limit = 3
+          grace = "5m"
+        }
+      }
     }
 
     task "prometheus" {
core/prometheus.tf (new file)
@@ -0,0 +1,7 @@
+resource "nomad_job" "prometheus" {
+  jobspec = templatefile("${path.module}/prometheus.nomad", {
+    use_wesher = var.use_wesher,
+  })
+
+  detach = false
+}
@@ -26,14 +26,8 @@ job "syslogng" {
     task "promtail" {
       driver = "docker"
 
-      meta = {
-        "diun.sort_tags"    = "semver"
-        "diun.watch_repo"   = true
-        "diun.include_tags" = "^[0-9]+\\.[0-9]+\\.[0-9]+$"
-      }
-
       config {
-        image = "grafana/promtail:2.9.1"
+        image = "grafana/promtail:3.3.0"
         ports = ["main", "metrics"]
         args  = ["--config.file=/etc/promtail/promtail.yml"]
 
@@ -72,7 +66,7 @@ EOF
 
       resources {
         cpu    = 50
-        memory = 20
+        memory = 50
       }
     }
   }
@@ -136,7 +130,7 @@ EOF
 
       resources {
         cpu    = 50
-        memory = 10
+        memory = 50
       }
     }
   }
core/traefik/.terraform.lock.hcl (generated)
@@ -2,20 +2,20 @@
 # Manual edits may be lost in future updates.
 
 provider "registry.terraform.io/hashicorp/nomad" {
-  version = "1.4.17"
+  version = "2.1.0"
   hashes = [
-    "h1:iPylWr144mqXvM8NBVMTm+MS6JRhqIihlpJG91GYDyA=",
-    "zh:146f97eacd9a0c78b357a6cfd2cb12765d4b18e9660a75500ee3e748c6eba41a",
-    "zh:2eb89a6e5cee9aea03a96ea9f141096fe3baf219b2700ce30229d2d882f5015f",
-    "zh:3d0f971f79b615c1014c75e2f99f34bd4b4da542ca9f31d5ea7fadc4e9de39c1",
-    "zh:46099a750c752ce05aa14d663a86478a5ad66d95aff3d69367f1d3628aac7792",
-    "zh:71e56006b013dcfe1e4e059b2b07148b44fcd79351ae2c357e0d97e27ae0d916",
-    "zh:74febd25d776688f0558178c2f5a0e6818bbf4cdaa2e160d7049da04103940f0",
+    "h1:ek0L7fA+4R1/BXhbutSRqlQPzSZ5aY/I2YfVehuYeEU=",
+    "zh:39ba4d4fc9557d4d2c1e4bf866cf63973359b73e908cce237c54384512bdb454",
+    "zh:40d2b66e3f3675e6b88000c145977c1d5288510c76b702c6c131d9168546c605",
+    "zh:40fbe575d85a083f96d4703c6b7334e9fc3e08e4f1d441de2b9513215184ebcc",
+    "zh:42ce6db79e2f94557fae516ee3f22e5271f0b556638eb45d5fbad02c99fc7af3",
+    "zh:4acf63dfb92f879b3767529e75764fef68886521b7effa13dd0323c38133ce88",
+    "zh:72cf35a13c2fb542cd3c8528826e2390db9b8f6f79ccb41532e009ad140a3269",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:af18c064a5f0dd5422d6771939274841f635b619ab392c73d5bf9720945fdb85",
-    "zh:c133d7a862079da9f06e301c530eacbd70e9288fa2276ec0704df907270ee328",
-    "zh:c894cf98d239b9f5a4b7cde9f5c836face0b5b93099048ee817b0380ea439c65",
-    "zh:c918642870f0cafdbe4d7dd07c909701fc3ddb47cac8357bdcde1327bf78c11d",
-    "zh:f8f5655099a57b4b9c0018a2d49133771e24c7ff8262efb1ceb140fd224aa9b6",
+    "zh:8b8bcc136c05916234cb0c3bcc3d48fda7ca551a091ad8461ea4ab16fb6960a3",
+    "zh:8e1c2f924eae88afe7ac83775f000ae8fd71a04e06228edf7eddce4df2421169",
+    "zh:abc6e725531fc06a8e02e84946aaabc3453ecafbc1b7a442ea175db14fd9c86a",
+    "zh:b735fcd1fb20971df3e92f81bb6d73eef845dcc9d3d98e908faa3f40013f0f69",
+    "zh:ce59797282505d872903789db8f092861036da6ec3e73f6507dac725458a5ec9",
   ]
 }
@@ -14,13 +14,15 @@ job "traefik" {
 
   update {
     max_parallel = 1
-    # canary       = 1
-    # auto_promote = true
+    canary       = 1
+    auto_promote = false
     auto_revert  = true
+    min_healthy_time = "30s"
+    healthy_deadline = "5m"
   }
 
   group "traefik" {
-    count = 1
+    count = 2
 
     network {
       port "web" {
|
|||||||
static = 514
|
static = 514
|
||||||
}
|
}
|
||||||
|
|
||||||
|
port "gitssh" {
|
||||||
|
static = 2222
|
||||||
|
}
|
||||||
|
|
||||||
|
port "metrics" {}
|
||||||
|
|
||||||
dns {
|
dns {
|
||||||
servers = [
|
servers = [
|
||||||
"192.168.2.101",
|
"192.168.2.101",
|
||||||
"192.168.2.102",
|
"192.168.2.102",
|
||||||
"192.168.2.30",
|
"192.168.2.30",
|
||||||
"192.168.2.170",
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -50,39 +57,42 @@ job "traefik" {
       sticky = true
     }
 
-    service {
-      name     = "traefik"
-      provider = "nomad"
-      port     = "web"
-
-      check {
-        type     = "http"
-        path     = "/ping"
-        port     = "web"
-        interval = "10s"
-        timeout  = "2s"
-      }
-
-      tags = [
-        "traefik.enable=true",
-        "traefik.http.routers.traefik.entryPoints=websecure",
-        "traefik.http.routers.traefik.service=api@internal",
-      ]
-    }
-
     task "traefik" {
       driver = "docker"
 
-      meta = {
-        "diun.sort_tags"    = "semver"
-        "diun.watch_repo"   = true
-        "diun.include_tags" = "^[0-9]+\\.[0-9]+$"
+      service {
+        name     = "traefik"
+        provider = "nomad"
+        port     = "web"
+
+        check {
+          type     = "http"
+          path     = "/ping"
+          interval = "10s"
+          timeout  = "2s"
+        }
+
+        tags = [
+          "traefik.enable=true",
+          "traefik.http.routers.traefik.entryPoints=websecure",
+          "traefik.http.routers.traefik.service=api@internal",
+        ]
+      }
+
+      service {
+        name     = "traefik-metrics"
+        provider = "nomad"
+        port     = "metrics"
+
+        tags = [
+          "prometheus.scrape",
+        ]
       }
 
       config {
-        image        = "traefik:2.9"
+        image        = "traefik:3.0"
 
-        ports        = ["web", "websecure"]
+        ports        = ["web", "websecure", "syslog", "gitssh", "metrics"]
         network_mode = "host"
@@ -96,6 +106,20 @@ job "traefik" {
         target = "/etc/traefik/usersfile"
         source = "secrets/usersfile"
       }
+
+      mount {
+        type   = "bind"
+        target = "/etc/traefik/certs"
+        source = "secrets/certs"
+      }
+
+      env = {
+        TRAEFIK_PROVIDERS_NOMAD_ENDPOINT_TOKEN = "${NOMAD_TOKEN}"
+      }
+
+      identity {
+        env = true
+      }
 
       template {
@@ -118,12 +142,9 @@ job "traefik" {
 [entryPoints.websecure]
   address = ":443"
   [entryPoints.websecure.http.tls]
-    certResolver = "letsEncrypt"
-    [[entryPoints.websecure.http.tls.domains]]
-      main = "*.<< with nomadVar "nomad/jobs" >><< .base_hostname >><< end >>"
 
 [entryPoints.metrics]
-  address = ":8989"
+  address = ":<< env "NOMAD_PORT_metrics" >>"
 
 [entryPoints.syslogtcp]
   address = ":514"
@@ -131,6 +152,9 @@ job "traefik" {
 [entryPoints.syslogudp]
   address = ":514/udp"
 
+[entryPoints.gitssh]
+  address = ":2222"
+
 [api]
   dashboard = true
 
@@ -150,31 +174,9 @@ job "traefik" {
     exposedByDefault = false
     defaultRule = "Host(`{{normalize .Name}}.<< with nomadVar "nomad/jobs" >><< .base_hostname >><< end >>`)"
     [providers.nomad.endpoint]
-      address = "http://<< env "attr.unique.network.ip-address" >>:4646"
+      address = "unix:///secrets/api.sock"
 
-<< if nomadVarExists "nomad/jobs/traefik" ->>
-[certificatesResolvers.letsEncrypt.acme]
-  email = "<< with nomadVar "nomad/jobs/traefik" >><< .acme_email >><< end >>"
-  # Store in /local because /secrets doesn't persist with ephemeral disk
-  storage = "/local/acme.json"
-  [certificatesResolvers.letsEncrypt.acme.dnsChallenge]
-    provider = "cloudflare"
-    resolvers = ["1.1.1.1:53", "8.8.8.8:53"]
-    delayBeforeCheck = 0
-<<- end >>
 EOH
-        destination = "local/config/traefik.toml"
-      }
-
-      template {
-        data = <<EOH
-{{ with nomadVar "nomad/jobs/traefik" -}}
-CF_DNS_API_TOKEN={{ .domain_lego_dns }}
-CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
-{{- end }}
-EOH
-        destination = "secrets/cloudflare.env"
-        env         = true
+        destination = "${NOMAD_TASK_DIR}/config/traefik.toml"
       }
 
       template {
@@ -185,23 +187,48 @@ CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
       entryPoints = ["websecure"]
       service = "nomad"
       rule = "Host(`nomad.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}`)"
-    [http.routers.hass]
+
+    {{ range nomadVarList "traefik_external" }}{{ with nomadVar .Path }}
+    [http.routers.{{ .name }}]
       entryPoints = ["websecure"]
-      service = "hass"
-      rule = "Host(`hass.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}`)"
+      service = "{{ .name }}"
+      rule = "Host(`{{ .subdomain }}.{{ with nomadVar "nomad/jobs" }}{{ .base_hostname }}{{ end }}`){{ with .path_prefix.Value }}&&PathPrefix(`{{ . }}`){{ end }}"
+      {{ $name := .name -}}
+      {{ with .path_prefix.Value -}}
+      middlewares = ["{{ $name }}@file"]
+      {{ end }}
+    {{- end }}{{ end }}
+
+    #[http.middlewares]
+    #  {{ range nomadVarList "traefik_external" }}{{ with nomadVar .Path -}}
+    #  {{ $name := .name -}}
+    #  {{ with .path_prefix.Value -}}
+    #  [http.middlewares.{{ $name }}.stripPrefix]
+    #    prefixes = ["{{ . }}"]
+    #  {{ end }}
+    #  {{- end }}{{ end }}
 
 [http.services]
   [http.services.nomad]
     [http.services.nomad.loadBalancer]
      [[http.services.nomad.loadBalancer.servers]]
        url = "http://127.0.0.1:4646"
-  [http.services.hass]
-    [http.services.hass.loadBalancer]
-      [[http.services.hass.loadBalancer.servers]]
-        url = "http://192.168.3.65:8123"
+
+  {{ range nomadVarList "traefik_external" }}{{ with nomadVar .Path }}
+  [http.services.{{ .name }}]
+    [http.services.{{ .name }}.loadBalancer]
+      [[http.services.{{ .name }}.loadBalancer.servers]]
+        url = "{{ .url }}"
+  {{- end }}{{ end }}
 EOH
-        destination = "local/config/conf/route-hashi.toml"
+        destination = "${NOMAD_TASK_DIR}/config/conf/route-hashi.toml"
         change_mode = "noop"
+        splay       = "1m"
+
+        wait {
+          min = "10s"
+          max = "20s"
+        }
       }
 
       template {
|
|||||||
{{ end -}}
|
{{ end -}}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
EOH
|
EOH
|
||||||
destination = "local/config/conf/route-syslog-ng.toml"
|
destination = "${NOMAD_TASK_DIR}/config/conf/route-syslog-ng.toml"
|
||||||
|
change_mode = "noop"
|
||||||
|
splay = "1m"
|
||||||
|
|
||||||
|
wait {
|
||||||
|
min = "10s"
|
||||||
|
max = "20s"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{- with nomadVar "secrets/certs/_lego/certificates/__thefij_rocks_crt" }}{{ .contents }}{{ end -}}"
|
||||||
|
EOF
|
||||||
|
destination = "${NOMAD_SECRETS_DIR}/certs/_.thefij.rocks.crt"
|
||||||
|
change_mode = "noop"
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOF
|
||||||
|
{{- with nomadVar "secrets/certs/_lego/certificates/__thefij_rocks_key" }}{{ .contents }}{{ end -}}"
|
||||||
|
EOF
|
||||||
|
destination = "${NOMAD_SECRETS_DIR}/certs/_.thefij.rocks.key"
|
||||||
|
change_mode = "noop"
|
||||||
|
}
|
||||||
|
|
||||||
|
template {
|
||||||
|
data = <<EOH
|
||||||
|
[[tls.certificates]]
|
||||||
|
certFile = "/etc/traefik/certs/_.thefij.rocks.crt"
|
||||||
|
keyFile = "/etc/traefik/certs/_.thefij.rocks.key"
|
||||||
|
EOH
|
||||||
|
destination = "${NOMAD_TASK_DIR}/config/conf/dynamic-tls.toml"
|
||||||
change_mode = "noop"
|
change_mode = "noop"
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -247,12 +306,11 @@ CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
|
|||||||
{{ with nomadVar "nomad/jobs/traefik" }}
|
{{ with nomadVar "nomad/jobs/traefik" }}
|
||||||
{{ if .usersfile }}
|
{{ if .usersfile }}
|
||||||
[http.middlewares.basic-auth.basicAuth]
|
[http.middlewares.basic-auth.basicAuth]
|
||||||
# TODO: Reference secrets mount
|
|
||||||
usersFile = "/etc/traefik/usersfile"
|
usersFile = "/etc/traefik/usersfile"
|
||||||
{{- end }}
|
{{- end }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
EOH
|
EOH
|
||||||
destination = "local/config/conf/middlewares.toml"
|
destination = "${NOMAD_TASK_DIR}/config/conf/middlewares.toml"
|
||||||
change_mode = "noop"
|
change_mode = "noop"
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -262,7 +320,7 @@ CF_ZONE_API_TOKEN={{ .domain_lego_dns }}
|
|||||||
{{ .usersfile }}
|
{{ .usersfile }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
EOH
|
EOH
|
||||||
destination = "secrets/usersfile"
|
destination = "${NOMAD_SECRETS_DIR}/usersfile"
|
||||||
change_mode = "noop"
|
change_mode = "noop"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1,3 +1,90 @@
 resource "nomad_job" "traefik" {
   jobspec = file("${path.module}/traefik.nomad")
 }
+
+resource "nomad_acl_policy" "treafik_secrets_certs_read" {
+  name        = "traefik-secrets-certs-read"
+  description = "Read certs to secrets store"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "secrets/certs/*" {
+      capabilities = ["read"]
+    }
+    path "secrets/certs" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+  job_acl {
+    job_id = resource.nomad_job.traefik.id
+  }
+}
+
+resource "nomad_acl_policy" "traefik_query_jobs" {
+  name        = "traefik-query-jobs"
+  description = "Allow traefik to query jobs"
+  rules_hcl   = <<EOH
+namespace "default" {
+  capabilities = ["list-jobs", "read-job"]
+}
+EOH
+  job_acl {
+    job_id = resource.nomad_job.traefik.id
+  }
+}
+
+resource "nomad_acl_policy" "treafik_external" {
+  name        = "traefik-exernal"
+  description = "Read external services"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "traefik_external/*" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+  job_acl {
+    job_id = "traefik"
+  }
+}
+
+resource "nomad_variable" "traefik_external_hass" {
+  path = "traefik_external/hass"
+  items = {
+    name      = "hass"
+    subdomain = "hass",
+    url       = "http://192.168.3.65:8123"
+  }
+}
+
+resource "nomad_variable" "traefik_external_plex" {
+  path = "traefik_external/plex"
+  items = {
+    name      = "plex"
+    subdomain = "plex",
+    url       = "http://agnosticfront.thefij:32400"
+  }
+}
+
+resource "nomad_variable" "traefik_external_appdaemon" {
+  path = "traefik_external/appdaemon"
+  items = {
+    name      = "appdaemon"
+    subdomain = "appdash",
+    url       = "http://192.168.3.65:5050"
+    # path_prefix = "/add"
+  }
+}
+
+resource "nomad_variable" "traefik_external_jellyfin" {
+  path = "traefik_external/jellyfin"
+  items = {
+    name      = "jellyfin"
+    subdomain = "jellyfin",
+    url       = "http://agnosticfront.thefij:8096"
+  }
+}
@@ -3,6 +3,10 @@ job "lldap" {
   type     = "service"
   priority = 80
 
+  update {
+    auto_revert = true
+  }
+
   group "lldap" {
 
     network {
@@ -70,10 +74,12 @@ job "lldap" {
         data = <<EOH
 ldap_base_dn = "{{ with nomadVar "nomad/jobs" }}{{ .ldap_base_dn }}{{ end }}"
 
-{{ with nomadVar "nomad/jobs/lldap" -}}
+{{ with nomadVar "secrets/ldap" -}}
 ldap_user_dn = "{{ .admin_user }}"
 ldap_user_email = "{{ .admin_email }}"
+{{ end -}}
+
+{{ with nomadVar "nomad/jobs/lldap" -}}
 [smtp_options]
 from = "{{ .smtp_from }}"
 reply_to = "{{ .smtp_reply_to }}"
@@ -109,7 +115,7 @@ user = "{{ .user }}"
       }
 
       template {
-        data        = "{{ with nomadVar \"nomad/jobs/lldap\" }}{{ .admin_password }}{{ end }}"
+        data        = "{{ with nomadVar \"secrets/ldap\" }}{{ .admin_password }}{{ end }}"
         destination = "$${NOMAD_SECRETS_DIR}/user_pass.txt"
         change_mode = "restart"
       }
@@ -139,7 +145,7 @@ user = "{{ .user }}"
         image = "mariadb:10"
         args  = [
           "/usr/bin/timeout",
-          "2m",
+          "20m",
           "/bin/bash",
           "-c",
           "until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
@@ -193,9 +199,9 @@ SELECT 'NOOP';
       }
 
       config {
-        image = "alpine:3.17"
+        image = "iamthefij/stunnel:1.0.0"
+        args  = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
         ports = ["tls"]
-        args  = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
       }
 
      resources {
@@ -203,15 +209,6 @@ SELECT 'NOOP';
         memory = 100
       }
 
-      template {
-        data        = <<EOF
-set -e
-apk add stunnel
-exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
-EOF
-        destination = "$${NOMAD_TASK_DIR}/start.sh"
-      }
-
       template {
         data = <<EOF
 syslog = no
@@ -222,7 +219,7 @@ delay = yes
 accept = {{ env "NOMAD_PORT_tls" }}
 connect = 127.0.0.1:{{ env "NOMAD_PORT_ldap" }}
 ciphers = PSK
-PSKsecrets = {{ env "NOMAD_TASK_DIR" }}/stunnel_psk.txt
+PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
 
 [mysql_client]
 client = yes
@@ -241,7 +238,7 @@ PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
 {{ with nomadVar .Path }}{{ .psk }}{{ end }}
 {{ end -}}
 EOF
-        destination = "$${NOMAD_TASK_DIR}/stunnel_psk.txt"
+        destination = "$${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
       }
 
       template {
@@ -9,6 +9,42 @@ resource "nomad_job" "lldap" {
   detach = false
 }
 
+# Give access to ldap secrets
+resource "nomad_acl_policy" "lldap_ldap_secrets" {
+  name        = "lldap-secrets-ldap"
+  description = "Give access to LDAP secrets"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "secrets/ldap/*" {
+      capabilities = ["read"]
+    }
+    path "secrets/ldap" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+
+  job_acl {
+    # job_id = resource.nomad_job.lldap.id
+    job_id = "lldap"
+  }
+}
+
+# Create self-scoped psk so that config is valid at first start
+resource "random_password" "lldap_ldap_psk" {
+  length           = 32
+  override_special = "!@#%&*-_="
+}
+
+resource "nomad_variable" "lldap_ldap_psk" {
+  path = "secrets/ldap/allowed_psks/ldap"
+  items = {
+    psk = "lldap:${resource.random_password.lldap_ldap_psk.result}"
+  }
+}
+
 # Give access to smtp secrets
 resource "nomad_acl_policy" "lldap_smtp_secrets" {
   name = "lldap-secrets-smtp"
@@ -24,6 +60,7 @@ namespace "default" {
 EOH
 
   job_acl {
+    # job_id = resource.nomad_job.lldap.id
     job_id = "lldap"
     group  = "lldap"
     task   = "lldap"
@@ -45,6 +82,7 @@ namespace "default" {
 EOH
 
   job_acl {
+    # job_id = resource.nomad_job.lldap.id
     job_id = "lldap"
     group  = "lldap"
     task   = "bootstrap"
@@ -77,27 +115,9 @@ namespace "default" {
 EOH
 
   job_acl {
+    # job_id = resource.nomad_job.lldap.id
     job_id = "lldap"
     group  = "lldap"
     task   = "stunnel"
   }
 }
-
-# Give access to all ldap secrets
-resource "nomad_acl_policy" "secrets_ldap" {
-  name        = "secrets-ldap"
-  description = "Give access to Postgres secrets"
-  rules_hcl   = <<EOH
-namespace "default" {
-  variables {
-    path "secrets/ldap/*" {
-      capabilities = ["read"]
-    }
-  }
-}
-EOH
-
-  job_acl {
-    job_id = resource.nomad_job.lldap.id
-  }
-}
@@ -1,62 +0,0 @@
-resource "nomad_job" "mysql-server" {
-  jobspec = file("${path.module}/mysql.nomad")
-
-  # Block until deployed as there are servics dependent on this one
-  detach = false
-}
-
-resource "nomad_acl_policy" "secrets_mysql" {
-  name        = "secrets-mysql"
-  description = "Give access to MySQL secrets"
-  rules_hcl   = <<EOH
-namespace "default" {
-  variables {
-    path "secrets/mysql/*" {
-      capabilities = ["read"]
-    }
-  }
-}
-EOH
-
-  job_acl {
-    job_id = resource.nomad_job.mysql-server.id
-  }
-}
-
-resource "nomad_job" "postgres-server" {
-  jobspec = file("${path.module}/postgres.nomad")
-
-  # Block until deployed as there are servics dependent on this one
-  detach = false
-}
-
-resource "nomad_acl_policy" "secrets_postgres" {
-  name        = "secrets-postgres"
-  description = "Give access to Postgres secrets"
-  rules_hcl   = <<EOH
-namespace "default" {
-  variables {
-    path "secrets/postgres/*" {
-      capabilities = ["read"]
-    }
-  }
-}
-EOH
-
-  job_acl {
-    job_id = resource.nomad_job.postgres-server.id
-  }
-}
-
-resource "nomad_job" "redis" {
-  for_each = toset(["blocky", "authelia"])
-
-  jobspec = templatefile("${path.module}/redis.nomad",
-    {
-      name = each.key,
-    }
-  )
-
-  # Block until deployed as there are servics dependent on this one
-  detach = false
-}
@@ -3,6 +3,10 @@ job "mysql-server" {
   type     = "service"
   priority = 80
 
+  update {
+    auto_revert = true
+  }
+
   group "mysql-server" {
     count = 1
 
@@ -73,7 +77,7 @@ MYSQL_ROOT_PASSWORD={{ .mysql_root_password }}
 
       resources {
         cpu    = 300
-        memory = 1536
+        memory = 1600
       }
     }
 
@@ -81,9 +85,9 @@ MYSQL_ROOT_PASSWORD={{ .mysql_root_password }}
       driver = "docker"
 
      config {
-        image = "alpine:3.17"
+        image = "iamthefij/stunnel:1.0.0"
+        args  = ["${NOMAD_TASK_DIR}/stunnel.conf"]
         ports = ["tls"]
-        args  = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
       }
 
       resources {
@@ -91,15 +95,6 @@ MYSQL_ROOT_PASSWORD={{ .mysql_root_password }}
         memory = 100
       }
 
-      template {
-        data        = <<EOF
-set -e
-apk add stunnel
-exec stunnel ${NOMAD_TASK_DIR}/stunnel.conf
-EOF
-        destination = "${NOMAD_TASK_DIR}/start.sh"
-      }
-
       template {
         data = <<EOF
 syslog = no
databases/mysql.tf (new file)
@@ -0,0 +1,41 @@
+resource "nomad_job" "mysql-server" {
+  jobspec = file("${path.module}/mysql.nomad")
+
+  # Block until deployed as there are servics dependent on this one
+  detach = false
+}
+
+resource "nomad_acl_policy" "secrets_mysql" {
+  name        = "secrets-mysql"
+  description = "Give access to MySQL secrets"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "secrets/mysql" {
+      capabilities = ["read"]
+    }
+    path "secrets/mysql/*" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+
+  job_acl {
+    # job_id = resource.nomad_job.mysql-server.id
+    job_id = "mysql-server"
+  }
+}
+
+# Create self-scoped psk so that config is valid at first start
+resource "random_password" "mysql_mysql_psk" {
+  length           = 32
+  override_special = "!@#%&*-_="
+}
+
+resource "nomad_variable" "mysql_mysql_psk" {
+  path = "secrets/mysql/allowed_psks/mysql"
+  items = {
+    psk = "mysql:${resource.random_password.mysql_mysql_psk.result}"
+  }
+}
@@ -3,6 +3,10 @@ job "postgres-server" {
   type     = "service"
   priority = 80
 
+  update {
+    auto_revert = true
+  }
+
   group "postgres-server" {
     count = 1
 
@@ -73,7 +77,8 @@ POSTGRES_PASSWORD={{ .superuser_pass }}
 
       resources {
         cpu    = 500
-        memory = 400
+        memory     = 800
+        memory_max = 1500
       }
     }
 
@@ -81,9 +86,9 @@ POSTGRES_PASSWORD={{ .superuser_pass }}
       driver = "docker"
 
       config {
-        image = "alpine:3.17"
+        image = "iamthefij/stunnel:1.0.0"
+        args  = ["${NOMAD_TASK_DIR}/stunnel.conf"]
         ports = ["tls"]
-        args  = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
       }
 
       resources {
@@ -91,15 +96,6 @@ POSTGRES_PASSWORD={{ .superuser_pass }}
         memory = 100
       }
 
-      template {
-        data        = <<EOF
-set -e
-apk add stunnel
-exec stunnel ${NOMAD_TASK_DIR}/stunnel.conf
-EOF
-        destination = "${NOMAD_TASK_DIR}/start.sh"
-      }
-
       template {
         data = <<EOF
 syslog = no
databases/postgres.tf (new file)
@@ -0,0 +1,41 @@
+resource "nomad_job" "postgres-server" {
+  jobspec = file("${path.module}/postgres.nomad")
+
+  # Block until deployed as there are servics dependent on this one
+  detach = false
+}
+
+resource "nomad_acl_policy" "secrets_postgres" {
+  name        = "secrets-postgres"
+  description = "Give access to Postgres secrets"
+  rules_hcl   = <<EOH
+namespace "default" {
+  variables {
+    path "secrets/postgres" {
+      capabilities = ["read"]
+    }
+    path "secrets/postgres/*" {
+      capabilities = ["read"]
+    }
+  }
+}
+EOH
+
+  job_acl {
+    # job_id = resource.nomad_job.postgres-server.id
+    job_id = "postgres-server"
+  }
+}
+
+# Create self-scoped psk so that config is valid at first start
+resource "random_password" "postgres_postgres_psk" {
+  length           = 32
+  override_special = "!@#%&*-_="
+}
+
+resource "nomad_variable" "postgres_postgres_psk" {
+  path = "secrets/postgres/allowed_psks/postgres"
+  items = {
+    psk = "postgres:${resource.random_password.postgres_postgres_psk.result}"
+  }
+}
@@ -3,6 +3,10 @@ job "redis-${name}" {
   type     = "service"
   priority = 80
 
+  update {
+    auto_revert = true
+  }
+
   group "cache" {
     count = 1
 
@@ -35,7 +39,7 @@ job "redis-${name}" {
 
       resources {
         cpu        = 100
-        memory     = 128
+        memory     = 64
         memory_max = 512
       }
     }
@@ -44,23 +48,14 @@ job "redis-${name}" {
       driver = "docker"
 
       config {
-        image = "alpine:3.17"
+        image = "iamthefij/stunnel:1.0.0"
+        args  = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
         ports = ["tls"]
-        args  = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
       }
 
       resources {
-        cpu    = 100
-        memory = 100
-      }
-
-      template {
-        data = <<EOF
-set -e
-apk add stunnel
-exec stunnel $${NOMAD_TASK_DIR}/stunnel.conf
-EOF
-        destination = "$${NOMAD_TASK_DIR}/start.sh"
+        cpu    = 50
+        memory = 15
       }
 
       template {
databases/redis.tf (new file)
@@ -0,0 +1,12 @@
+resource "nomad_job" "redis" {
+  for_each = toset(["blocky", "authelia"])
+
+  jobspec = templatefile("${path.module}/redis.nomad",
+    {
+      name = each.key,
+    }
+  )
+
+  # Block until deployed as there are servics dependent on this one
+  detach = false
+}
@@ -1,5 +1,5 @@
 pre-commit
 detect-secrets==1.4.0 # This should match what is in .pre-commit-config.yaml
 ansible
-python-consul
-hvac
+python-nomad
+netaddr
@@ -56,6 +56,10 @@ for job in nomad_req("jobs"):
     if job["Type"] in ("batch", "sysbatch"):
         continue
 
+    if job["Status"] != "running":
+        print(f"WARNING: job {job['Name']} is {job['Status']}")
+        continue
+
     job_detail = nomad_req("job", job["ID"])
     job_detail = cast(dict[str, Any], job_detail)
 
@@ -83,7 +87,7 @@ for job in nomad_req("jobs"):
     restart_allocs: set[str] = set()
     for allocation in nomad_req("job", job_detail["ID"], "allocations"):
         allocation = cast(dict[str, Any], allocation)
-        if allocation["TaskGroup"] in restart_groups:
+        if allocation["ClientStatus"] == "running" and allocation["TaskGroup"] in restart_groups:
             restart_allocs.add(allocation["ID"])
 
 # Restart allocs associated with missing services
@@ -57,9 +57,11 @@ for namespace in nomad_req("services"):
         except requests.exceptions.HTTPError as e:
             if e.response.status_code == 404:
                 alloc_found = False
-                print(
-                    f"alloc {alloc_id} not found for {service_name}. Deleting {service_id}"
-                )
+                message = f"alloc {alloc_id} not found for {service_name}."
+                if args.delete:
+                    message += f" Deleting {service_id}"
+
+                print(message)
             else:
                 raise e
 
services/.terraform.lock.hcl (generated)
@@ -2,39 +2,39 @@
 # Manual edits may be lost in future updates.
 
 provider "registry.terraform.io/hashicorp/nomad" {
-  version = "2.0.0"
+  version = "2.1.1"
   hashes = [
-    "h1:lIHIxA6ZmfyTGL3J9YIddhxlfit4ipSS09BLxkwo6L0=",
-    "zh:09b897d64db293f9a904a4a0849b11ec1e3fff5c638f734d82ae36d8dc044b72",
-    "zh:435cc106799290f64078ec24b6c59cb32b33784d609088638ed32c6d12121199",
-    "zh:7073444bd064e8c4ec115ca7d9d7f030cc56795c0a83c27f6668bba519e6849a",
+    "h1:liQBgBXfQEYmwpoGZUfSsu0U0t/nhvuRZbMhaMz7wcQ=",
+    "zh:28bc6922e8a21334568410760150d9d413d7b190d60b5f0b4aab2f4ef52efeeb",
+    "zh:2d4283740e92ce1403875486cd5ff2c8acf9df28c190873ab4d769ce37db10c1",
+    "zh:457e16d70075eae714a7df249d3ba42c2176f19b6750650152c56f33959028d9",
+    "zh:49ee88371e355c00971eefee6b5001392431b47b3e760a5c649dda76f59fb8fa",
+    "zh:614ad3bf07155ed8a5ced41dafb09042afbd1868490a379654b3e970def8e33d",
+    "zh:75be7199d76987e7549e1f27439922973d1bf27353b44a593bfbbc2e3b9f698f",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:79d238c35d650d2d83a439716182da63f3b2767e72e4cbd0b69cb13d9b1aebfc",
-    "zh:7ef5f49344278fe0bbc5447424e6aa5425ff1821d010d944a444d7fa2c751acf",
-    "zh:92179091638c8ba03feef371c4361a790190f9955caea1fa59de2055c701a251",
-    "zh:a8a34398851761368eb8e7c171f24e55efa6e9fdbb5c455f6dec34dc17f631bc",
-    "zh:b38fd5338625ebace5a4a94cea1a28b11bd91995d834e318f47587cfaf6ec599",
-    "zh:b71b273a2aca7ad5f1e07c767b25b5a888881ba9ca93b30044ccc39c2937f03c",
-    "zh:cd14357e520e0f09fb25badfb4f2ee37d7741afdc3ed47c7bcf54c1683772543",
-    "zh:e05e025f4bb95138c3c8a75c636e97cd7cfd2fc1525b0c8bd097db8c5f02df6e",
+    "zh:888e14a24410d56b37212fbea373a3e0401d0ff8f8e4f4dd00ba8b29de9fed39",
+    "zh:aa261925e8b152636a0886b3a2149864707632d836e98a88dacea6cfb6302082",
+    "zh:ac10cefb4064b3bb63d4b0379624a416a45acf778eac0004466f726ead686196",
+    "zh:b1a3c8b4d5b2dc9b510eac5e9e02665582862c24eb819ab74f44d3d880246d4f",
+    "zh:c552e2fe5670b6d3ad9a5faf78e3a27197eeedbe2b13928d2c491fa509bc47c7",
   ]
 }
 
 provider "registry.terraform.io/hashicorp/random" {
-  version = "3.5.1"
+  version = "3.6.0"
   hashes = [
-    "h1:VSnd9ZIPyfKHOObuQCaKfnjIHRtR7qTw19Rz8tJxm+k=",
-    "zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64",
-    "zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d",
-    "zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831",
-    "zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3",
-    "zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f",
+    "h1:R5Ucn26riKIEijcsiOMBR3uOAjuOMfI1x7XvH4P6B1w=",
+    "zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d",
+    "zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211",
+    "zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829",
+    "zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d",
+    "zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055",
     "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b",
-    "zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2",
-    "zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865",
-    "zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03",
-    "zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602",
-    "zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014",
+    "zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17",
+    "zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21",
+    "zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839",
+    "zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0",
+    "zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c",
+    "zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e",
   ]
 }
@@ -1,58 +0,0 @@
-module "bazarr" {
-  source = "./service"
-
-  name  = "bazarr"
-  image = "lscr.io/linuxserver/bazarr:1.2.4"
-
-  resources = {
-    cpu    = 150
-    memory = 400
-  }
-
-  ingress      = true
-  service_port = 6767
-  use_wesher   = var.use_wesher
-
-  use_postgres = true
-  postgres_bootstrap = {
-    enabled = true
-  }
-
-  env = {
-    PGID = 100
-    PUID = 1001
-    TZ   = "America/Los_Angeles"
-  }
-
-  host_volumes = [
-    {
-      name      = "bazarr-config"
-      dest      = "/config"
-      read_only = false
-    },
-    {
-      name      = "media-write"
-      dest      = "/media"
-      read_only = false
-    },
-  ]
-
-  templates = [
-    {
-      data = <<EOF
-{{ with nomadVar "nomad/jobs/bazarr" -}}
-POSTGRES_ENABLED=True
-POSTGRES_HOST=127.0.0.1
-POSTGRES_PORT=5432
-POSTGRES_DATABASE={{ .db_name }}
-POSTGRES_USERNAME={{ .db_user }}
-POSTGRES_PASSWORD={{ .db_pass }}
-{{- end }}
-EOF
-      dest_prefix = "$${NOMAD_SECRETS_DIR}/"
-      dest        = "env"
-      env         = true
-      mount       = false
-    },
-  ]
-}
services/christmas-community.tf (new file)
@@ -0,0 +1,44 @@
+module "wishlist" {
+  source = "./service"
+
+  name  = "wishlist"
+  image = "wingysam/christmas-community:latest"
+
+  ingress      = true
+  service_port = 80
+  use_wesher   = var.use_wesher
+
+  host_volumes = [
+    {
+      name      = "christmas-community"
+      dest      = "/data"
+      read_only = false
+    },
+  ]
+
+  templates = [
+    {
+      data = <<EOF
+{{ with nomadVar "nomad/jobs/wishlist" -}}
+GUEST_PASSWORD={{ .guest_password }}
+{{ end -}}
+{{ with nomadService "traefik" -}}
+{{- $last := len . | subtract 1 -}}
+{{- $services := . -}}
+TRUST_PROXY={{ range $i := loop $last -}}
+{{- with index $services $i }}{{ .Address }},{{ end -}}
+{{- end -}}
+{{- with index . $last }}{{ .Address }}{{ end -}}
+{{- end }}
+EOF
+      dest        = "env"
+      dest_prefix = "$${NOMAD_SECRETS_DIR}/"
+      env         = true
+    },
+  ]
+
+  resources = {
+    cpu    = 100
+    memory = 200
+  }
+}
@@ -2,7 +2,7 @@ module "diun" {
   source = "./service"
 
   name  = "diun"
-  image = "crazymax/diun:4.26"
+  image = "crazymax/diun:4.28"
   args  = ["serve", "--log-level=debug"]
 
   sticky_disk = true
@@ -13,13 +13,16 @@ module "diun" {
     DIUN_PROVIDERS_NOMAD_WATCHBYDEFAULT = true
     DIUN_DEFAULTS_WATCHREPO             = true
     DIUN_DEFAULTS_SORTTAGS              = "semver"
-    DIUN_DEFAUTLS_INCLUDETAGS           = "^\\d+(\\.\\d+){0,2}$"
+    DIUN_DEFAULTS_INCLUDETAGS           = "^\\d+(\\.\\d+){0,2}$"
 
     # Nomad API
-    # TODO: Use socket in $NOMAD_SECRETS_DIR/api.sock when we can assign workload ACLs with Terraform to
-    # allow read access. Will need to update template to allow passing token by env
-    NOMAD_ADDR           = "http://$${attr.unique.network.ip-address}:4646/"
-    DIUN_PROVIDERS_NOMAD = true
+    NOMAD_ADDR                    = "unix:///secrets/api.sock"
+    DIUN_PROVIDERS_NOMAD          = true
+    DIUN_PROVIDERS_NOMAD_SECRETID = "$${NOMAD_TOKEN}"
+  }
+
+  task_identity = {
+    env = true
   }
 
   templates = [
@@ -35,18 +38,17 @@ module "diun" {
       mount = false
     },
   ]
+}
 
-  workload_acl_policy = {
-    name        = "diun-read"
-    description = "Give the diun task read access to jobs"
-    rules_hcl   = <<EOH
+resource "nomad_acl_policy" "diun_query_jobs" {
+  name        = "diun-query-jobs"
+  description = "Allow diun to query jobs"
+  rules_hcl   = <<EOH
 namespace "default" {
-  capabilities = [
-    "list-jobs",
-    "read-job",
-  ]
+  capabilities = ["list-jobs", "read-job"]
 }
 EOH
+  job_acl {
+    job_id = module.diun.job_id
+  }
 }
168
services/gitea.tf
Normal file
168
services/gitea.tf
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
module "gitea" {
|
||||||
|
source = "./service"
|
||||||
|
|
||||||
|
name = "git"
|
||||||
|
image = "gitea/gitea:1.21"
|
||||||
|
|
||||||
|
resources = {
|
||||||
|
cpu = 200
|
||||||
|
memory = 512
|
||||||
|
}
|
||||||
|
|
||||||
|
env = {
|
||||||
|
# Custom files should be part of the task
|
||||||
|
GITEA_WORK_DIR = "$${NOMAD_TASK_DIR}"
|
||||||
|
GITEA_CUSTOM = "$${NOMAD_TASK_DIR}/custom"
|
||||||
|
}
|
||||||
|
|
||||||
|
ingress = true
|
||||||
|
service_port = 3000
|
||||||
|
use_wesher = var.use_wesher
|
||||||
|
ports = [
|
||||||
|
{
|
||||||
|
name = "ssh"
|
||||||
|
to = 22
|
||||||
|
}
|
||||||
|
]
|
||||||
|
service_check = {
|
||||||
|
path = "/api/healthz"
|
||||||
|
}
|
||||||
|
|
||||||
|
custom_services = [
|
||||||
|
{
|
||||||
|
name = "git-ssh"
|
||||||
|
port = "ssh"
|
||||||
|
tags = [
|
||||||
|
"traefik.enable=true",
|
||||||
|
"traefik.tcp.routers.git-ssh.entryPoints=gitssh",
|
||||||
|
"traefik.tcp.routers.git-ssh.rule=HostSNI(`*`)",
|
        "traefik.tcp.routers.git-ssh.tls=false",
      ]
    },
  ]

  use_smtp = true

  mysql_bootstrap = {
    enabled = true
  }

  oidc_client_config = {
    description = "Gitea"
    redirect_uris = [
      "https://git.thefij.rocks/user/oauth2/authelia/callback",
    ]
    scopes = ["openid", "email", "profile"]
  }

  host_volumes = [
    {
      name      = "gitea-data"
      dest      = "/data"
      read_only = false
    },
  ]

  # TODO: Bootstrap OIDC with
  # su -- git gitea admin auth add-oauth --name authelia --provider openidConnect --key gitea --secret "{{ .oidc_secret }}" --auto-discover-url https://authelia.thefij.rocks/.well-known/openid-configuration --skip-local-2fa

  templates = [
    {
      data  = <<EOF
{{ with nomadVar "nomad/jobs/git" }}
GITEA__server__DOMAIN=git.thefij.rocks
GITEA__server__SSH_PORT=2222
GITEA__server__ROOT_URL=https://git.thefij.rocks

GITEA__security__INSTALL_LOCK=true

GITEA__database__DB_TYPE=mysql
GITEA__database__HOST=127.0.0.1:3306
GITEA__database__NAME={{ .db_name }}
GITEA__database__USER={{ .db_user }}

GITEA__service__DISABLE_REGISTRATION=false
GITEA__service__ALLOW_ONLY_EXTERNAL_REGISTRATION=true
GITEA__service__SHOW_REGISTRATION_BUTTON=false

GITEA__openid__ENABLE_OPENID_SIGNIN=true
GITEA__openid__ENABLE_OPENID_SIGNUP=true
GITEA__openid__WHITELISTED_URIS=authelia.thefij.rocks

GITEA__log__ROOT_PATH={{ env "NOMAD_TASK_DIR" }}/log

GITEA__mailer__ENABLED=true
GITEA__mailer__FROM={{ .smtp_sender }}

GITEA__session__provider=db
{{ end }}
EOF
      env   = true
      mount = false
      dest  = "env"
    },
    # TODO: Gitea writes these out to the ini file in /local anyway
    # Find some way to get it to write to /secrets
    {
      data        = <<EOF
{{ with nomadVar "nomad/jobs/git" }}
GITEA__security__SECRET_KEY="{{ .secret_key }}"
GITEA__database__PASSWD={{ .db_pass }}
{{ end }}
{{ with nomadVar "secrets/smtp" }}
GITEA__mailer__SMTP_ADDR={{ .server }}
GITEA__mailer__SMTP_PORT={{ .port }}
GITEA__mailer__USER={{ .user }}
GITEA__mailer__PASSWD={{ .password }}
{{ end }}
EOF
      env         = true
      mount       = false
      dest        = "env"
      dest_prefix = "$${NOMAD_SECRETS_DIR}"
    },
    {
      data        = <<EOF
{{ with nomadVar "secrets/authelia/git" -}}
CLIENT_ID={{ .client_id }}
SECRET={{ .secret }}
{{- end }}
EOF
      dest        = "oauth.env"
      dest_prefix = "$${NOMAD_SECRETS_DIR}"
      mount       = false
      change_mode = "script"
      change_script = {
        command = "/local/bootstrap_auth.sh"
      }
    },
    {
      data        = <<EOF
#! /bin/bash
source {{ env "NOMAD_SECRETS_DIR" }}/oauth.env
auth_provider_id=$(su -- git gitea admin auth list | awk '/authelia/ { print $1 }')

if [ -z "$auth_provider_id" ]; then
  echo "Creating Authelia OAuth provider"
  su -- git gitea admin auth add-oauth \
    --name authelia \
    --provider openidConnect \
    --key "$CLIENT_ID" \
    --secret "$SECRET" \
    --auto-discover-url https://authelia.thefij.rocks/.well-known/openid-configuration \
    --skip-local-2fa
else
  echo "Updating Authelia OAuth provider"
  su -- git gitea admin auth update-oauth \
    --id $auth_provider_id \
    --key "$CLIENT_ID" \
    --secret "$SECRET"
fi
EOF
      dest        = "bootstrap_auth.sh"
      perms       = "777"
      change_mode = "noop"
      mount       = false
    },
  ]
}
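Note: the oauth.env template above sets change_mode = "script", so Nomad re-runs bootstrap_auth.sh whenever the Authelia client credentials change. A minimal sketch of verifying the registered auth source by hand, assuming shell access to the running task (the allocation ID is a placeholder):

# Run the same listing the bootstrap script uses, inside the git task
nomad alloc exec -task git <alloc-id> su -- git gitea admin auth list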
services/ip-dvr.nomad (deleted file)
@ -1,212 +0,0 @@
job "ipdvr" {
  region      = "global"
  datacenters = ["dc1"]

  type = "service"

  group "sabnzbd" {
    network {
      mode = "bridge"

      port "main" {
        %{~ if use_wesher ~}
        host_network = "wesher"
        %{~ endif ~}
        to = 8080
      }
    }

    volume "sabnzbd-config" {
      type      = "host"
      read_only = false
      source    = "sabnzbd-config"
    }

    volume "media-downloads" {
      type      = "host"
      read_only = false
      source    = "media-downloads"
    }

    service {
      name     = "sabnzbd"
      provider = "nomad"
      port     = "main"

      tags = [
        "traefik.enable=true",
        "traefik.http.routers.sabnzbd.entryPoints=websecure",
      ]
    }

    task "sabnzbd" {
      driver = "docker"

      config {
        image = "linuxserver/sabnzbd"
        ports = ["main"]
      }

      env = {
        "PGID" = 100
        "PUID" = 1001
        "TZ"   = "America/Los_Angeles"
      }

      volume_mount {
        volume      = "sabnzbd-config"
        destination = "/config"
        read_only   = false
      }

      volume_mount {
        volume      = "media-downloads"
        destination = "/downloads"
        read_only   = false
      }

      resources {
        cpu        = 400
        memory     = 500
        memory_max = 800
      }
    }
  }

  group "nzbget" {
    network {
      mode = "bridge"
      port "main" {
        %{~ if use_wesher ~}
        host_network = "wesher"
        %{~ endif ~}
        static = 6789
      }
    }

    volume "nzbget-config" {
      type      = "host"
      read_only = false
      source    = "nzbget-config"
    }

    volume "media-downloads" {
      type      = "host"
      read_only = false
      source    = "media-downloads"
    }

    service {
      name     = "nzbget"
      provider = "nomad"
      port     = "main"

      tags = [
        "traefik.enable=true",
        "traefik.http.routers.nzbget.entryPoints=websecure",
      ]
    }

    task "nzbget" {
      driver = "docker"

      config {
        image = "linuxserver/nzbget"
        ports = ["main"]
      }

      env = {
        "PGID" = 100
        "PUID" = 1001
        "TZ"   = "America/Los_Angeles"
      }

      volume_mount {
        volume      = "nzbget-config"
        destination = "/config"
        read_only   = false
      }

      volume_mount {
        volume      = "media-downloads"
        destination = "/downloads"
        read_only   = false
      }

      resources {
        cpu        = 200
        memory     = 300
        memory_max = 500
      }
    }
  }

  group "sonarr" {
    network {
      mode = "bridge"
      port "main" {
        %{~ if use_wesher ~}
        host_network = "wesher"
        %{~ endif ~}
        to = 8989
      }
    }

    volume "sonarr-data" {
      type      = "host"
      read_only = false
      source    = "sonarr-data"
    }

    volume "media-write" {
      type      = "host"
      read_only = false
      source    = "media-write"
    }

    service {
      name     = "sonarr"
      provider = "nomad"
      port     = "main"

      tags = [
        "traefik.enable=true",
        "traefik.http.routers.sonarr.entryPoints=websecure",
      ]
    }

    task "sonarr" {
      driver = "docker"

      config {
        image = "lscr.io/linuxserver/sonarr:3.0.10"
        ports = ["main"]
      }

      env = {
        "PGID" = 100
        "PUID" = 1001
        "TZ"   = "America/Los_Angeles"
      }

      volume_mount {
        volume      = "sonarr-data"
        destination = "/config"
        read_only   = false
      }

      volume_mount {
        volume      = "media-write"
        destination = "/media"
        read_only   = false
      }

      resources {
        cpu        = 100
        memory     = 500
        memory_max = 700
      }
    }
  }
}
services/languagetool.tf (new file, 25 lines)
@ -0,0 +1,25 @@
module "languagetool" {
  source = "./service"

  name         = "languagetool"
  image        = "ghcr.io/erikvl87/docker-languagetool/languagetool:4.8"
  ingress      = true
  service_port = 8010
  use_wesher   = var.use_wesher

  env = {
    Java_Xmx = "512m"
  }

  service_check = {
    path = "/v2/healthcheck"
  }

  # Possibility to use a volume over nfs to host n-gram datasets
  # https://github.com/Erikvl87/docker-languagetool/pkgs/container/docker-languagetool%2Flanguagetool#using-n-gram-datasets

  resources = {
    cpu    = 100
    memory = 512
  }
}
@ -40,9 +40,4 @@ module "lidarr" {
|
|||||||
cpu = 500
|
cpu = 500
|
||||||
memory = 1500
|
memory = 1500
|
||||||
}
|
}
|
||||||
|
|
||||||
stunnel_resources = {
|
|
||||||
cpu = 100
|
|
||||||
memory = 100
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
@ -1,5 +0,0 @@
-resource "nomad_job" "ipdvr" {
-  jobspec = templatefile("${path.module}/ip-dvr.nomad", {
-    use_wesher = var.use_wesher,
-  })
-}
@ -11,25 +11,11 @@ monitors:
       - '/app/scripts/curl_ok.sh'
       - 'https://my.iamthefij.com'
-
-  - name: Grafana (public)
-    command:
-      - '/app/scripts/curl_ok.sh'
-      - 'https://grafana.iamthefij.com'

   - name: Grafana (internal)
     command:
       - '/app/scripts/curl_ok.sh'
       - 'https://grafana.thefij.rocks'
-
-  - name: Plex
-    command:
-      - 'curl'
-      - '--silent'
-      - '--show-error'
-      - '-o'
-      - '/dev/null'
-      - 'http://192.168.2.10:32400'

   - name: NZBget
     command:
       - '/app/scripts/curl_ok.sh'
@ -45,6 +31,11 @@ monitors:
       - '/app/scripts/curl_ok.sh'
       - 'https://lidarr.thefij.rocks'
+
+  - name: Radarr
+    command:
+      - '/app/scripts/curl_ok.sh'
+      - 'https://radarr.thefij.rocks'

   - name: Authelia
     command:
       - '/app/scripts/curl_ok.sh'
@ -55,6 +46,20 @@ monitors:
       - '/app/scripts/curl_ok.sh'
       - 'https://photoprism.thefij.rocks'
+
+  - name: Prometheus
+    command:
+      - '/app/scripts/curl_ok.sh'
+      - 'https://prometheus.thefij.rocks'
+
+  - name: Plex
+    command:
+      - 'curl'
+      - '--silent'
+      - '--show-error'
+      - '-o'
+      - '/dev/null'
+      - 'http://192.168.2.10:32400'

 alerts:
   log:
     command:
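For reference, the Plex monitor above calls curl directly instead of the curl_ok.sh wrapper; flattened to one line, its command list is equivalent to:

# Same check as the Plex monitor's command list
curl --silent --show-error -o /dev/null 'http://192.168.2.10:32400'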
@ -1,12 +1,13 @@
 module "minitor" {
   source = "./service"

   name         = "minitor"
   image        = "iamthefij/minitor-go:1.4.1"
   args         = ["-metrics", "-config=$${NOMAD_TASK_DIR}/config.yml"]
   service_port = 8080
+  service_check = null
   use_wesher   = var.use_wesher
   prometheus   = true

   env = {
     TZ = "America/Los_Angeles",
services/nomad-fixers.nomad (new file, 63 lines)
@ -0,0 +1,63 @@
job "fixers" {

  type = "batch"

  periodic {
    cron             = "*/15 * * * * *"
    prohibit_overlap = true
  }

  meta = {
    "diun.enable" = false
  }

  group "orphaned_services" {
    task "orphaned_services" {
      driver = "docker"

      config {
        image   = "iamthefij/nomad-service-fixers:0.1.1"
        command = "/scripts/nomad_orphan_services.py"
        args    = ["--delete"]
      }

      env = {
        NOMAD_ADDR = "http+unix://%2Fsecrets%2Fapi.sock"
      }

      identity {
        env = true
      }

      resources {
        cpu    = 50
        memory = 100
      }
    }
  }

  group "missing_services" {
    task "missing_services" {
      driver = "docker"

      config {
        image   = "iamthefij/nomad-service-fixers:0.1.1"
        command = "/scripts/nomad_missing_services.py"
        args    = ["--restart"]
      }

      env = {
        NOMAD_ADDR = "http+unix://%2Fsecrets%2Fapi.sock"
      }

      identity {
        env = true
      }

      resources {
        cpu    = 50
        memory = 100
      }
    }
  }
}
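Both fixer tasks talk to Nomad through the task API socket that the identity block exposes at /secrets/api.sock. A rough sketch of the same call made by hand from inside a task, assuming the workload identity token is in NOMAD_TOKEN (which identity { env = true } provides):

# List jobs over the task API unix socket, authenticated with the workload token
curl --silent --unix-socket /secrets/api.sock \
  -H "X-Nomad-Token: $NOMAD_TOKEN" \
  http://localhost/v1/jobs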
services/nomad-fixers.tf (new file, 22 lines)
@ -0,0 +1,22 @@
resource "nomad_job" "nomad-fixers" {
  jobspec = file("${path.module}/nomad-fixers.nomad")
}

resource "nomad_acl_policy" "nomad_fixers_workload" {
  name        = "nomad-fixers-workload"
  description = "Give nomad fixers access to the Nomad api for fixing things"
  rules_hcl   = <<EOH
namespace "default" {
  capabilities = [
    "list-jobs",
    "read-job",
    "submit-job", # This allows deleting a service registration
    "alloc-lifecycle",
  ]
}
EOH

  job_acl {
    job_id = "fixers/*"
  }
}
services/nzbget.tf (new file, 35 lines)
@ -0,0 +1,35 @@
module "nzbget" {
  source = "./service"

  name  = "nzbget"
  image = "linuxserver/nzbget:v21.1-ls138"

  ingress      = true
  service_port = 6789
  use_wesher   = var.use_wesher

  env = {
    PGID = 100
    PUID = 1001
    TZ   = "America/Los_Angeles"
  }

  host_volumes = [
    {
      name      = "nzbget-config"
      dest      = "/config"
      read_only = false
    },
    {
      name      = "media-downloads"
      dest      = "/downloads"
      read_only = false
    },
  ]

  resources = {
    cpu        = 200
    memory     = 300
    memory_max = 500
  }
}
@ -1,28 +1,25 @@
-module "photoprism_module" {
+module "photoprism" {
   source = "./service"

   name               = "photoprism"
-  image              = "photoprism/photoprism:221118-jammy"
+  image              = "photoprism/photoprism:240711"
   image_pull_timeout = "10m"
-  constraints = [{
-    attribute = "$${meta.hw_transcode.type}"
-    # operator  = "is_set"
-    value     = "raspberry"
-  }]
+  # constraints = [{
+  #   attribute = "$${meta.hw_transcode.type}"
+  #   # operator  = "is_set"
+  #   value     = "raspberry"
+  # }]
+
+  priority = 60

-  docker_devices = [{
-    host_path      = "$${meta.hw_transcode.device}"
-    container_path = "$${meta.hw_transcode.device}"
-  }]
+  # docker_devices = [{
+  #   host_path      = "$${meta.hw_transcode.device}"
+  #   container_path = "$${meta.hw_transcode.device}"
+  # }]
   resources = {
-    cpu        = 2000
-    memory     = 2500
+    cpu        = 1500
+    memory     = 2200
     memory_max = 4000
   }
-  stunnel_resources = {
-    cpu    = 100
-    memory = 100
-  }
   sticky_disk = true
   host_volumes = [
     {
@ -40,18 +37,21 @@ module "photoprism_module" {
   ingress      = true
   service_port = 2342
   use_wesher   = var.use_wesher
-  ingress_middlewares = [
-    "authelia@nomad"
-  ]

   mysql_bootstrap = {
     enabled = true
   }

+  oidc_client_config = {
+    description = "Photoprism"
+    redirect_uris = [
+      "https://photoprism.thefij.rocks/api/v1/oidc/redirect",
+    ]
+    scopes = ["openid", "email", "profile"]
+  }

   env = {
     PHOTOPRISM_DEBUG = true
-    # Make public since we added Authelia at the proxy level
-    PHOTOPRISM_AUTH_MODE = "public"
     # UI
     PHOTOPRISM_SITE_CAPTION     = "AI-Powered Photos App"
     PHOTOPRISM_SITE_DESCRIPTION = "Fijolek home photos"
@ -59,7 +59,8 @@ module "photoprism_module" {
     PHOTOPRISM_SITE_URL = "https://photoprism.${var.base_hostname}/"
     PHOTOPRISM_SPONSOR  = "true"
     # Worker config
     PHOTOPRISM_WORKERS = 2
+    PHOTOPRISM_BACKUP_DATABASE = false
     # Paths
     PHOTOPRISM_ORIGINALS_PATH = "/photoprism-media/Library"
     PHOTOPRISM_IMPORT_PATH    = "/photoprism-media/Import"
@ -68,6 +69,12 @@ module "photoprism_module" {
     PHOTOPRISM_UID   = 500
     PHOTOPRISM_GID   = 100
     PHOTOPRISM_UMASK = 0000
+    # OIDC
+    PHOTOPRISM_OIDC_URI      = "https://authelia.thefij.rocks"
+    PHOTOPRISM_OIDC_PROVIDER = "Authelia"
+    PHOTOPRISM_OIDC_REGISTER = true
+    PHOTOPRISM_OIDC_REDIRECT = true
+    PHOTOPRISM_OIDC_SCOPES   = "openid email profile"
   }

   templates = [
@ -89,6 +96,10 @@ module "photoprism_module" {
 PHOTOPRISM_FFMPEG_ENCODER=intel
 PHOTOPRISM_INIT="intel tensorflow"
 {{- end }}
+{{ with nomadVar "secrets/authelia/photoprism" -}}
+PHOTOPRISM_OIDC_CLIENT={{ .client_id }}
+PHOTOPRISM_OIDC_SECRET={{ .secret }}
+{{- end }}
 EOF
       dest_prefix = "$${NOMAD_SECRETS_DIR}/"
       dest        = "env"
@ -96,4 +107,13 @@ module "photoprism_module" {
       mount = false
     },
   ]
+
+  actions = [
+    {
+      name    = "import"
+      command = "photoprism"
+      args    = ["import", "/photoprism-media/Import"]
+      cron    = "0 0 3 * * * *"
+    },
+  ]
 }
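The new actions block schedules PhotoPrism's importer via the service module's cron support (see the service module changes below). The same action can also be run on demand; this sketch mirrors the arguments the generated cron job passes to the Nomad CLI:

# Trigger the "import" action once, outside the cron schedule
nomad job action -job photoprism -group photoprism -task photoprism import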
services/radarr.tf (new file, 59 lines)
@ -0,0 +1,59 @@
module "radarr" {
  source = "./service"

  name  = "radarr"
  image = "lscr.io/linuxserver/radarr:5.2.6"

  ingress      = true
  service_port = 7878
  use_wesher   = var.use_wesher
  ingress_middlewares = [
    "authelia@nomad"
  ]

  use_postgres = true
  postgres_bootstrap = {
    enabled = true
    databases = [
      "radarr",
      "radarr-logs",
    ]
  }

  env = {
    PGID = 100
    PUID = 1001
    TZ   = "America/Los_Angeles"
  }

  host_volumes = [
    {
      name      = "radarr-config"
      dest      = "/config"
      read_only = false
    },
    {
      name      = "media-write"
      dest      = "/media"
      read_only = false
    },
  ]

  resources = {
    cpu        = 500
    memory     = 500
    memory_max = 700
  }
}

resource "nomad_variable" "authelia_service_rules_radarr" {
  path = "authelia/access_control/service_rules/radarr"
  items = {
    name = "radarr"
    rule = <<EOH
policy: bypass
resources:
  - '^/api([/?].*)?$'
EOH
  }
}
@ -1,3 +1,10 @@
+locals {
+  port_names = concat(
+    var.service_port != null ? ["main"] : [],
+    [for port in var.ports : port.name if port.task_config],
+  )
+}
+
 resource "nomad_job" "service" {
   jobspec = templatefile("${path.module}/service_template.nomad", {
     name = var.name
@ -8,14 +15,19 @@ resource "nomad_job" "service" {
     args           = var.args
     env            = var.env
     task_meta      = var.task_meta
+    task_identity  = var.task_identity
     group_meta     = var.group_meta
     job_meta       = var.job_meta
     constraints    = var.constraints
     docker_devices = var.docker_devices
+    user           = var.user
+    actions        = var.actions

     service_port        = var.service_port
     service_port_static = var.service_port_static
+    service_check       = var.service_check
     ports               = var.ports
+    port_names          = local.port_names
     sticky_disk         = var.sticky_disk
     resources           = var.resources
     stunnel_resources   = var.stunnel_resources
@ -38,27 +50,11 @@ resource "nomad_job" "service" {

     mysql_bootstrap    = var.mysql_bootstrap
     postgres_bootstrap = var.postgres_bootstrap
-
-    workload_identity_env = var.workload_acl_policy != null
   })

   detach = var.detach
 }

-resource "nomad_acl_policy" "workload_special" {
-  count = var.workload_acl_policy != null ? 1 : 0
-
-  name        = var.workload_acl_policy.name
-  description = var.workload_acl_policy.description
-  rules_hcl   = var.workload_acl_policy.rules_hcl
-
-  job_acl {
-    job_id = var.name
-    group  = var.name
-    task   = var.name
-  }
-}
-
 resource "nomad_acl_policy" "secrets_mysql" {
   count = var.use_mysql || var.mysql_bootstrap != null ? 1 : 0

@ -75,7 +71,7 @@ namespace "default" {
 EOH

   job_acl {
-    job_id = var.name
+    job_id = resource.nomad_job.service.id
     group  = var.name
     task   = "mysql-bootstrap"
   }
@ -113,7 +109,7 @@ namespace "default" {
 EOH

   job_acl {
-    job_id = var.name
+    job_id = resource.nomad_job.service.id
     group  = var.name
     task   = "stunnel"
   }
@ -135,7 +131,7 @@ namespace "default" {
 EOH

   job_acl {
-    job_id = var.name
+    job_id = resource.nomad_job.service.id
     group  = var.name
     task   = "postgres-bootstrap"
   }
@ -173,7 +169,7 @@ namespace "default" {
 EOH

   job_acl {
-    job_id = var.name
+    job_id = resource.nomad_job.service.id
     group  = var.name
     task   = "stunnel"
   }
@ -211,7 +207,7 @@ namespace "default" {
 EOH

   job_acl {
-    job_id = var.name
+    job_id = resource.nomad_job.service.id
     group  = var.name
     task   = "stunnel"
   }
@ -233,8 +229,58 @@ namespace "default" {
 EOH

   job_acl {
-    job_id = var.name
+    job_id = resource.nomad_job.service.id
     group  = var.name
     task   = var.name
   }
 }
+
+module "oidc_client" {
+  count = var.oidc_client_config != null ? 1 : 0
+
+  source = "../../core/oidc_client"
+  name   = var.name
+
+  oidc_client_config = {
+    description          = var.oidc_client_config.description
+    authorization_policy = var.oidc_client_config.authorization_policy
+    redirect_uris        = var.oidc_client_config.redirect_uris
+    scopes               = var.oidc_client_config.scopes
+  }
+
+  job_acl = {
+    job_id = resource.nomad_job.service.id
+    group  = var.name
+    task   = var.name
+  }
+}
+
+# Action cron jobs
+resource "nomad_job" "action_cron" {
+  for_each = tomap({ for action in var.actions : action.name => action if action.cron != null })
+  jobspec = templatefile("${path.module}/service_scheduled.nomad", {
+    name        = var.name
+    action_name = each.value.name
+    action_cron = each.value.cron
+  })
+}
+
+resource "nomad_acl_policy" "action_cron_workload_policy" {
+  for_each = resource.nomad_job.action_cron
+
+  name        = "service-action-${each.value.id}"
+  description = "Give custom service cron actions access to execute actions."
+  rules_hcl   = <<EOH
+namespace "default" {
+  capabilities = [
+    "list-jobs",
+    "read-job",
+    "alloc-exec",
+  ]
+}
+EOH
+
+  job_acl {
+    job_id = each.value.id
+  }
+}
services/service/output.tf (new file, 3 lines)
@ -0,0 +1,3 @@
output "job_id" {
  value = resource.nomad_job.service.id
}
services/service/service_scheduled.nomad (new file, 39 lines)
@ -0,0 +1,39 @@
job "${name}-${action_name}" {
  region      = "global"
  datacenters = ["dc1"]

  type = "batch"

  periodic {
    cron = "${action_cron}"
  }

  group "main" {
    task "${action_name}" {
      driver = "docker"

      config {
        image = "hashicorp/nomad:$${attr.nomad.version}"
        args = [
          "job",
          "action",
          "-job",
          "${name}",
          "-group",
          "${name}",
          "-task",
          "${name}",
          "${action_name}"
        ]
      }

      env = {
        NOMAD_ADDR = "unix:///secrets/api.sock"
      }

      identity {
        env = true
      }
    }
  }
}
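Each cron-enabled action becomes a periodic batch job named "${name}-${action_name}", e.g. photoprism-import for the PhotoPrism import action above. A quick sketch for inspecting it and forcing a run ahead of schedule, assuming normal Nomad CLI access:

# Check the generated periodic job and kick off an immediate run
nomad job status photoprism-import
nomad job periodic force photoprism-import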
@ -5,6 +5,10 @@ job "${name}" {
|
|||||||
type = "service"
|
type = "service"
|
||||||
priority = ${priority}
|
priority = ${priority}
|
||||||
|
|
||||||
|
update {
|
||||||
|
auto_revert = true
|
||||||
|
}
|
||||||
|
|
||||||
group "${name}" {
|
group "${name}" {
|
||||||
count = ${count}
|
count = ${count}
|
||||||
%{~ if length(job_meta) > 0 }
|
%{~ if length(job_meta) > 0 }
|
||||||
@ -31,10 +35,18 @@ job "${name}" {
|
|||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
%{~ for port in ports }
|
%{~ for port in ports }
|
||||||
port "${port.name}" {
|
port "${port.name}" {
|
||||||
%{ if port.host_network != null }host_network = "${port.host_network}"%{ endif ~}
|
%{~ if port.host_network != null ~}
|
||||||
%{ if port.from != null }to = ${port.from}%{ endif ~}
|
host_network = "${port.host_network}"
|
||||||
%{ if port.to != null }to = ${port.to}%{ endif ~}
|
%{~ endif ~}
|
||||||
%{ if port.static != null }static = ${port.static}%{ endif ~}
|
%{~ if port.from != null ~}
|
||||||
|
from = ${port.from}
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ if port.to != null ~}
|
||||||
|
to = ${port.to}
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ if port.static != null ~}
|
||||||
|
static = ${port.static}
|
||||||
|
%{~ endif ~}
|
||||||
}
|
}
|
||||||
%{~ endfor ~}
|
%{~ endfor ~}
|
||||||
}
|
}
|
||||||
@ -65,45 +77,12 @@ job "${name}" {
|
|||||||
source = "${host_volume.name}"
|
source = "${host_volume.name}"
|
||||||
}
|
}
|
||||||
%{~ endfor ~}
|
%{~ endfor ~}
|
||||||
%{~ if service_port != null }
|
|
||||||
service {
|
|
||||||
name = "${replace(name, "_", "-")}"
|
|
||||||
provider = "nomad"
|
|
||||||
port = "main"
|
|
||||||
|
|
||||||
tags = [
|
|
||||||
%{~ if prometheus == true ~}
|
|
||||||
"prometheus.scrape",
|
|
||||||
%{~ endif ~}
|
|
||||||
%{~ if ingress ~}
|
|
||||||
"traefik.enable=true",
|
|
||||||
"traefik.http.routers.${name}.entryPoints=websecure",
|
|
||||||
%{~ if try(ingress_rule, null) != null ~}
|
|
||||||
"traefik.http.routers.${name}.rule=${ingress_rule}",
|
|
||||||
%{~ endif ~}
|
|
||||||
%{~ for middleware in ingress_middlewares ~}
|
|
||||||
"traefik.http.routers.${name}.middlewares=${middleware}",
|
|
||||||
%{~ endfor ~}
|
|
||||||
%{~ endif ~}
|
|
||||||
%{~ for tag in service_tags ~}
|
|
||||||
"${tag}",
|
|
||||||
%{~ endfor ~}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
%{~ endif ~}
|
|
||||||
|
|
||||||
%{~ for custom_service in custom_services ~}
|
|
||||||
service {
|
|
||||||
name = "${custom_service.name}"
|
|
||||||
provider = "nomad"
|
|
||||||
port = "${custom_service.port}"
|
|
||||||
|
|
||||||
tags = ${jsonencode(custom_service.tags)}
|
|
||||||
}
|
|
||||||
|
|
||||||
%{~ endfor ~}
|
|
||||||
task "${name}" {
|
task "${name}" {
|
||||||
driver = "docker"
|
driver = "docker"
|
||||||
|
%{~ if user != null }
|
||||||
|
user = "${user}"
|
||||||
|
%{~ endif ~}
|
||||||
%{~ if length(task_meta) > 0 }
|
%{~ if length(task_meta) > 0 }
|
||||||
meta = {
|
meta = {
|
||||||
%{ for k, v in task_meta ~}
|
%{ for k, v in task_meta ~}
|
||||||
@ -111,14 +90,70 @@ job "${name}" {
|
|||||||
%{ endfor ~}
|
%{ endfor ~}
|
||||||
}
|
}
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
|
%{~ if service_port != null }
|
||||||
|
service {
|
||||||
|
name = "${replace(name, "_", "-")}"
|
||||||
|
provider = "nomad"
|
||||||
|
port = "main"
|
||||||
|
|
||||||
|
tags = [
|
||||||
|
%{~ if prometheus == true ~}
|
||||||
|
"prometheus.scrape",
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ if ingress ~}
|
||||||
|
"traefik.enable=true",
|
||||||
|
"traefik.http.routers.${name}.entryPoints=websecure",
|
||||||
|
%{~ if try(ingress_rule, null) != null ~}
|
||||||
|
"traefik.http.routers.${name}.rule=${ingress_rule}",
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ for middleware in ingress_middlewares ~}
|
||||||
|
"traefik.http.routers.${name}.middlewares=${middleware}",
|
||||||
|
%{~ endfor ~}
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ for tag in service_tags ~}
|
||||||
|
"${tag}",
|
||||||
|
%{~ endfor ~}
|
||||||
|
]
|
||||||
|
|
||||||
|
%{~ if service_check != null ~}
|
||||||
|
check {
|
||||||
|
%{~ if service_check.name != "" ~}
|
||||||
|
name = "${service_check.name}"
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ if service_check.name != "" ~}
|
||||||
|
port = "${service_check.port}"
|
||||||
|
%{~ endif ~}
|
||||||
|
type = "${service_check.type}"
|
||||||
|
path = "${service_check.path}"
|
||||||
|
interval = "${service_check.interval}"
|
||||||
|
timeout = "${service_check.timeout}"
|
||||||
|
|
||||||
|
check_restart {
|
||||||
|
limit = 5
|
||||||
|
grace = "90s"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
%{~ endif ~}
|
||||||
|
}
|
||||||
|
|
||||||
|
%{~ endif ~}
|
||||||
|
%{~ for custom_service in custom_services ~}
|
||||||
|
service {
|
||||||
|
name = "${custom_service.name}"
|
||||||
|
provider = "nomad"
|
||||||
|
port = "${custom_service.port}"
|
||||||
|
|
||||||
|
tags = ${jsonencode(custom_service.tags)}
|
||||||
|
}
|
||||||
|
|
||||||
|
%{~ endfor ~}
|
||||||
config {
|
config {
|
||||||
image = "${image}"
|
image = "${image}"
|
||||||
%{~if image_pull_timeout != null ~}
|
%{~if image_pull_timeout != null ~}
|
||||||
image_pull_timeout = "${image_pull_timeout}"
|
image_pull_timeout = "${image_pull_timeout}"
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
%{~ if service_port != null ~}
|
%{~ if length(try(port_names, [])) > 0 ~}
|
||||||
ports = ["main"]
|
ports = ${jsonencode(port_names)}
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
%{~ if length(try(args, [])) > 0 ~}
|
%{~ if length(try(args, [])) > 0 ~}
|
||||||
args = ${jsonencode(args)}
|
args = ${jsonencode(args)}
|
||||||
@ -150,6 +185,14 @@ job "${name}" {
|
|||||||
%{~ endfor ~}
|
%{~ endfor ~}
|
||||||
}
|
}
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
|
%{~ for action in actions }
|
||||||
|
action "${action.name}" {
|
||||||
|
command = "${action.command}"
|
||||||
|
%{~ if length(action.args) > 0 ~}
|
||||||
|
args = ${jsonencode(action.args)}
|
||||||
|
%{~ endif ~}
|
||||||
|
}
|
||||||
|
%{~ endfor ~}
|
||||||
%{~ for volume in host_volumes }
|
%{~ for volume in host_volumes }
|
||||||
volume_mount {
|
volume_mount {
|
||||||
volume = "${volume.name}"
|
volume = "${volume.name}"
|
||||||
@ -169,12 +212,23 @@ EOF
|
|||||||
%{~ if template.right_delimiter != null }
|
%{~ if template.right_delimiter != null }
|
||||||
right_delimiter = "${template.right_delimiter}"
|
right_delimiter = "${template.right_delimiter}"
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
|
%{~ if template.perms != null }
|
||||||
|
perms = "${template.perms}"
|
||||||
|
%{~ endif ~}
|
||||||
%{~ if template.change_mode != null }
|
%{~ if template.change_mode != null }
|
||||||
change_mode = "${template.change_mode}"
|
change_mode = "${template.change_mode}"
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
%{~ if template.change_signal != null }
|
%{~ if template.change_signal != null }
|
||||||
change_signal = "${template.change_signal}"
|
change_signal = "${template.change_signal}"
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
|
%{~ if template.change_script != null }
|
||||||
|
change_script {
|
||||||
|
command = "${template.change_script.command}"
|
||||||
|
args = ${jsonencode(template.change_script.args)}
|
||||||
|
timeout = "${template.change_script.timeout}"
|
||||||
|
fail_on_error = ${template.change_script.fail_on_error}
|
||||||
|
}
|
||||||
|
%{~ endif ~}
|
||||||
%{~ if template.env != null }
|
%{~ if template.env != null }
|
||||||
env = ${template.env}
|
env = ${template.env}
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
@ -189,9 +243,10 @@ EOF
|
|||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
}
|
}
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
%{~ if workload_identity_env }
|
%{~ if task_identity != null }
|
||||||
identity {
|
identity {
|
||||||
env = true
|
env = ${task_identity.env}
|
||||||
|
file = ${task_identity.file}
|
||||||
}
|
}
|
||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
}
|
}
|
||||||
@ -208,10 +263,10 @@ EOF
|
|||||||
image = "mariadb:10"
|
image = "mariadb:10"
|
||||||
args = [
|
args = [
|
||||||
"/usr/bin/timeout",
|
"/usr/bin/timeout",
|
||||||
"2m",
|
"20m",
|
||||||
"/bin/bash",
|
"/bin/bash",
|
||||||
"-c",
|
"-c",
|
||||||
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
|
"until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do echo 'Retry in 10s'; sleep 10; done",
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -271,10 +326,10 @@ SELECT 'NOOP';
|
|||||||
image = "postgres:14"
|
image = "postgres:14"
|
||||||
args = [
|
args = [
|
||||||
"/usr/bin/timeout",
|
"/usr/bin/timeout",
|
||||||
"2m",
|
"20m",
|
||||||
"/bin/bash",
|
"/bin/bash",
|
||||||
"-c",
|
"-c",
|
||||||
"until /bin/bash $${NOMAD_TASK_DIR}/bootstrap.sh; do sleep 10; done",
|
"until /bin/bash $${NOMAD_TASK_DIR}/bootstrap.sh; do echo 'Retry in 10s'; sleep 10; done",
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -343,8 +398,8 @@ $$;
|
|||||||
}
|
}
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "alpine:3.17"
|
image = "iamthefij/stunnel:1.0.0"
|
||||||
args = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
|
args = ["$${NOMAD_TASK_DIR}/stunnel.conf"]
|
||||||
}
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
@ -355,15 +410,6 @@ $$;
|
|||||||
%{~ endif ~}
|
%{~ endif ~}
|
||||||
}
|
}
|
||||||
|
|
||||||
template {
|
|
||||||
data = <<EOF
|
|
||||||
set -e
|
|
||||||
apk add stunnel
|
|
||||||
exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
|
|
||||||
EOF
|
|
||||||
destination = "$${NOMAD_TASK_DIR}/start.sh"
|
|
||||||
}
|
|
||||||
|
|
||||||
template {
|
template {
|
||||||
data = <<EOF
|
data = <<EOF
|
||||||
syslog = no
|
syslog = no
|
||||||
|
@ -21,7 +21,6 @@ variable "priority" {
|
|||||||
description = "Scheduler priority of the service"
|
description = "Scheduler priority of the service"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
variable "image" {
|
variable "image" {
|
||||||
type = string
|
type = string
|
||||||
description = "Image that should be run"
|
description = "Image that should be run"
|
||||||
@ -33,12 +32,27 @@ variable "image_pull_timeout" {
|
|||||||
description = "A time duration that controls how long Nomad will wait before cancelling an in-progress pull of the Docker image"
|
description = "A time duration that controls how long Nomad will wait before cancelling an in-progress pull of the Docker image"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "user" {
|
||||||
|
type = string
|
||||||
|
default = null
|
||||||
|
description = "User to be passed to the task driver for execution. [ user | user:group | uid | uid:gid | user:gid | uid:group ]"
|
||||||
|
}
|
||||||
|
|
||||||
variable "task_meta" {
|
variable "task_meta" {
|
||||||
type = map(string)
|
type = map(string)
|
||||||
default = {}
|
default = {}
|
||||||
description = "Meta attributes to attach to the task"
|
description = "Meta attributes to attach to the task"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "task_identity" {
|
||||||
|
description = "Task workload identity"
|
||||||
|
type = object({
|
||||||
|
env = optional(bool, false)
|
||||||
|
file = optional(bool, false)
|
||||||
|
})
|
||||||
|
default = null
|
||||||
|
}
|
||||||
|
|
||||||
variable "group_meta" {
|
variable "group_meta" {
|
||||||
type = map(string)
|
type = map(string)
|
||||||
default = {}
|
default = {}
|
||||||
@ -63,6 +77,19 @@ variable "service_port_static" {
|
|||||||
description = "Should the port assigned be static"
|
description = "Should the port assigned be static"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "ports" {
|
||||||
|
type = list(object({
|
||||||
|
name = string
|
||||||
|
host_network = optional(string)
|
||||||
|
from = optional(number)
|
||||||
|
to = optional(number)
|
||||||
|
static = optional(number)
|
||||||
|
task_config = optional(bool, false)
|
||||||
|
}))
|
||||||
|
default = []
|
||||||
|
description = "Additional ports (not service_port) to be bound."
|
||||||
|
}
|
||||||
|
|
||||||
variable "prometheus" {
|
variable "prometheus" {
|
||||||
type = bool
|
type = bool
|
||||||
default = false
|
default = false
|
||||||
@ -110,7 +137,7 @@ variable "stunnel_resources" {
|
|||||||
|
|
||||||
default = {
|
default = {
|
||||||
cpu = 50
|
cpu = 50
|
||||||
memory = 50
|
memory = 15
|
||||||
memory_max = null
|
memory_max = null
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -141,29 +168,24 @@ variable "service_tags" {
|
|||||||
description = "Additional tags to be added to the service."
|
description = "Additional tags to be added to the service."
|
||||||
}
|
}
|
||||||
|
|
||||||
variable "ports" {
|
|
||||||
type = list(object({
|
|
||||||
name = string
|
|
||||||
host_network = optional(string)
|
|
||||||
from = optional(number)
|
|
||||||
to = optional(number)
|
|
||||||
static = optional(number)
|
|
||||||
}))
|
|
||||||
default = []
|
|
||||||
description = "Additional ports (not service_port) to be bound."
|
|
||||||
}
|
|
||||||
|
|
||||||
variable "templates" {
|
variable "templates" {
|
||||||
type = list(object({
|
type = list(object({
|
||||||
data = string
|
data = string
|
||||||
dest = string
|
dest = string
|
||||||
dest_prefix = optional(string, "$${NOMAD_TASK_DIR}")
|
dest_prefix = optional(string, "$${NOMAD_TASK_DIR}")
|
||||||
change_mode = optional(string)
|
|
||||||
change_signal = optional(string)
|
|
||||||
left_delimiter = optional(string)
|
left_delimiter = optional(string)
|
||||||
right_delimiter = optional(string)
|
right_delimiter = optional(string)
|
||||||
mount = optional(bool, true)
|
mount = optional(bool, true)
|
||||||
env = optional(bool, false)
|
env = optional(bool, false)
|
||||||
|
perms = optional(string)
|
||||||
|
change_mode = optional(string)
|
||||||
|
change_signal = optional(string)
|
||||||
|
change_script = optional(object({
|
||||||
|
command = optional(string, "")
|
||||||
|
args = optional(list(string), [])
|
||||||
|
timeout = optional(string, "5s")
|
||||||
|
fail_on_error = optional(bool, false)
|
||||||
|
}))
|
||||||
}))
|
}))
|
||||||
default = []
|
default = []
|
||||||
description = "Templates to be used"
|
description = "Templates to be used"
|
||||||
@ -263,11 +285,38 @@ variable "use_wesher" {
|
|||||||
default = true
|
default = true
|
||||||
}
|
}
|
||||||
|
|
||||||
variable "workload_acl_policy" {
|
variable "actions" {
|
||||||
|
description = "Nomad actions that should be part of the main task"
|
||||||
|
type = list(object({
|
||||||
|
name = string
|
||||||
|
command = string
|
||||||
|
args = optional(list(string))
|
||||||
|
cron = optional(string)
|
||||||
|
}))
|
||||||
|
default = []
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "service_check" {
|
||||||
|
description = "Health check for main ingress service"
|
||||||
type = object({
|
type = object({
|
||||||
name = string
|
name = optional(string, "")
|
||||||
description = string
|
port = optional(string, "")
|
||||||
rules_hcl = string
|
path = optional(string, "/")
|
||||||
|
interval = optional(string, "30s")
|
||||||
|
timeout = optional(string, "2s")
|
||||||
|
type = optional(string, "http")
|
||||||
|
})
|
||||||
|
|
||||||
|
default = {}
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "oidc_client_config" {
|
||||||
|
description = "Authelia oidc client configuration to enable oidc authentication"
|
||||||
|
type = object({
|
||||||
|
description = string
|
||||||
|
authorization_policy = optional(string, "one_factor")
|
||||||
|
redirect_uris = list(string)
|
||||||
|
scopes = list(string)
|
||||||
})
|
})
|
||||||
|
|
||||||
default = null
|
default = null
|
||||||
|
services/sonarr.tf (new file, 66 lines)
@ -0,0 +1,66 @@
module "sonarr" {
  source = "./service"

  name  = "sonarr"
  image = "lscr.io/linuxserver/sonarr:4.0.2"

  priority = 55

  ingress      = true
  service_port = 8989
  use_wesher   = var.use_wesher
  ingress_middlewares = [
    "authelia@nomad"
  ]

  use_postgres = true
  postgres_bootstrap = {
    enabled = true
    databases = [
      "sonarr",
      "sonarr-logs",
    ]
  }

  env = {
    PGID = 100
    PUID = 1001
    TZ   = "America/Los_Angeles"
  }

  host_volumes = [
    {
      name      = "sonarr-config"
      dest      = "/config"
      read_only = false
    },
    {
      name      = "media-write"
      dest      = "/media"
      read_only = false
    },
    {
      name      = "media-overflow-write"
      dest      = "/media-overflow"
      read_only = false
    },
  ]

  resources = {
    cpu        = 100
    memory     = 500
    memory_max = 700
  }
}

resource "nomad_variable" "authelia_service_rules_sonarr" {
  path = "authelia/access_control/service_rules/sonarr"
  items = {
    name = "sonarr"
    rule = <<EOH
policy: bypass
resources:
  - '^/api([/?].*)?$'
EOH
  }
}
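The bypass rule above lets API clients authenticate to Sonarr with its own API key instead of an Authelia session. A hedged example, assuming a Sonarr v4 API key exported as SONARR_API_KEY:

# Paths matching '^/api([/?].*)?$' skip the Authelia middleware
curl -H "X-Api-Key: $SONARR_API_KEY" https://sonarr.thefij.rocks/api/v3/system/status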
services/unifi-traffic-route-ips.nomad (new file, 40 lines)
@ -0,0 +1,40 @@
job "unifi-traffic-route-ips" {

  type = "batch"

  periodic {
    cron             = "*/15 * * * * *"
    prohibit_overlap = true
  }

  meta = {
    "diun.enable" = false
  }

  group "main" {

    task "main" {
      driver = "docker"

      config {
        image = "iamthefij/unifi-traffic-routes:0.0.4"
      }

      env = {
        UNIFI_HOST = "192.168.2.1",
        UNIFI_PORT = "443",
      }

      template {
        data        = <<EOF
{{ with nomadVar "nomad/jobs/unifi-traffic-route-ips" -}}
UNIFI_USER={{ .unifi_username }}
UNIFI_PASS={{ .unifi_password }}
{{ end -}}
EOF
        destination = "$${NOMAD_SECRETS_DIR}/env"
        env         = true
      }
    }
  }
}
services/unifi-traffic-route-ips.tf (new file, 3 lines)
@ -0,0 +1,3 @@
resource "nomad_job" "unifi-traffic-route-ips" {
  jobspec = file("${path.module}/unifi-traffic-route-ips.nomad")
}
services/ytdl.tf (new file, 35 lines)
@ -0,0 +1,35 @@
module "ytdl-web" {
  source = "./service"

  name  = "ytdl-web"
  image = "iamthefij/ytdl-web:0.1.4"
  args  = ["poetry", "run", "python", "-m", "ytdl_web.web", "--downloader"]

  ingress      = true
  service_port = 5000
  use_wesher   = var.use_wesher
  # service_check = null
  user = "1001:100"

  env = {
    QUEUE_DIR   = "/data/queue"
    OUTPUT_TMPL = "/media/RomeTube/%(uploader)s%(channel)s/%(title)s.%(ext)s"
  }

  resources = {
    cpu    = 50
    memory = 150
  }

  host_volumes = [
    {
      name      = "ytdl-web"
      dest      = "/data"
      read_only = false
    },
    {
      name      = "media-write"
      dest      = "/media"
      read_only = false
    },
  ]
}