Refactor everything for nomad vars

This commit is contained in:
IamTheFij 2023-03-24 11:24:36 -07:00
parent 9f5752c66b
commit d8307935f5
22 changed files with 208 additions and 579 deletions

View File

@ -193,25 +193,7 @@
"line_number": 252, "line_number": 252,
"is_secret": false "is_secret": false
} }
],
"core/syslogng.nomad": [
{
"type": "Base64 High Entropy String",
"filename": "core/syslogng.nomad",
"hashed_secret": "298b5925fe7c7458cb8a12a74621fdedafea5ad6",
"is_verified": false,
"line_number": 165,
"is_secret": false
},
{
"type": "Base64 High Entropy String",
"filename": "core/syslogng.nomad",
"hashed_secret": "3a1cec2d3c3de7e4da4d99c6731ca696c24b72b4",
"is_verified": false,
"line_number": 165,
"is_secret": false
}
] ]
}, },
"generated_at": "2023-01-13T23:47:42Z" "generated_at": "2023-03-24T18:23:24Z"
} }

View File

@ -45,11 +45,8 @@ job "blocky" {
provider = "nomad" provider = "nomad"
port = "api" port = "api"
meta {
metrics_addr = "${NOMAD_ADDR_api}"
}
tags = [ tags = [
"prometheus.scrape",
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.blocky-api.entryPoints=websecure", "traefik.http.routers.blocky-api.entryPoints=websecure",
] ]

View File

@ -14,40 +14,16 @@ job "metrics" {
service { service {
name = "promtail" name = "promtail"
provider = "nomad"
port = "promtail" port = "promtail"
meta { meta {
metrics_addr = "${NOMAD_ADDR_promtail}"
nomad_dc = "${NOMAD_DC}" nomad_dc = "${NOMAD_DC}"
nomad_node_name = "${node.unique.name}" nomad_node_name = "${node.unique.name}"
} }
connect { tags = [
sidecar_service { "prometheus.scrape",
proxy {
local_service_port = 9080
upstreams {
destination_name = "loki"
local_bind_port = 1000
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 20
}
}
}
check {
type = "http"
path = "/metrics"
port = "promtail"
interval = "10s"
timeout = "10s"
} }
} }
@ -111,7 +87,9 @@ server:
http_listen_port: 9080 http_listen_port: 9080
clients: clients:
- url: http://{{ env "NOMAD_UPSTREAM_ADDR_loki" }}/loki/api/v1/push {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "loki" -}}
- url: http://{{ .Address }}:{{ .Port }}/loki/api/v1/push
{{- end }}
scrape_configs: scrape_configs:

View File

@ -8,10 +8,11 @@ job "grafana" {
mode = "bridge" mode = "bridge"
port "web" { port "web" {
host_network = "loopback" host_network = "wgoverlay"
to = 3000 to = 3000
} }
# TODO: Not sure if this needs to be mapped here since it's within the group
port "renderer" { port "renderer" {
host_network = "loopback" host_network = "loopback"
to = 8081 to = 8081
@ -25,46 +26,9 @@ job "grafana" {
service { service {
name = "grafana" name = "grafana"
provider = "nomad"
port = "web" port = "web"
connect {
sidecar_service {
proxy {
local_service_port = 3000
upstreams {
destination_name = "prometheus"
local_bind_port = 9090
}
upstreams {
destination_name = "loki"
local_bind_port = 3100
}
upstreams {
destination_name = "mysql-server"
local_bind_port = 6060
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
}
}
}
check {
type = "http"
path = "/"
port = "web"
interval = "10s"
timeout = "10s"
}
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.grafana.entryPoints=websecure", "traefik.http.routers.grafana.entryPoints=websecure",
@ -98,25 +62,27 @@ job "grafana" {
template { template {
data = <<EOF data = <<EOF
[client] [client]
host={{ env "NOMAD_UPSTREAM_IP_mysql_server" }} {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
port={{ env "NOMAD_UPSTREAM_PORT_mysql_server" }} host={{ .Address }}
port={{ .Port }}
{{ end -}}
user=root user=root
{{ with secret "kv/data/mysql" }} {{ with nomadVar "nomad/jobs" -}}
password={{ .Data.data.root_password }} password={{ .mysql_root_password }}
{{ end }} {{ end -}}
EOF EOF
destination = "$${NOMAD_SECRETS_DIR}/my.cnf" destination = "$${NOMAD_SECRETS_DIR}/my.cnf"
} }
template { template {
data = <<EOF data = <<EOF
{{ with secret "kv/data/grafana" -}} {{ with nomadVar "nomad/jobs/grafana" -}}
{{ if .Data.data.db_name -}} {{ if .db_name -}}
CREATE DATABASE IF NOT EXISTS `{{ .Data.data.db_name }}`; CREATE DATABASE IF NOT EXISTS `{{ .db_name }}`;
CREATE USER IF NOT EXISTS '{{ .Data.data.db_user }}'@'%' IDENTIFIED BY '{{ .Data.data.db_pass }}'; CREATE USER IF NOT EXISTS '{{ .db_user }}'@'%' IDENTIFIED BY '{{ .db_pass }}';
GRANT ALL ON `{{ .Data.data.db_name }}`.* to '{{ .Data.data.db_user }}'@'%'; GRANT ALL ON `{{ .db_name }}`.* to '{{ .db_user }}'@'%';
-- Add grafana read_only user -- Create Read Only user
CREATE USER IF NOT EXISTS '{{ .Data.data.db_user_ro }}'@'%' IDENTIFIED BY '{{ .Data.data.db_pass_ro }}'; CREATE USER IF NOT EXISTS '{{ .db_user_ro }}'@'%' IDENTIFIED BY '{{ .db_pass_ro }}';
{{ else -}} {{ else -}}
SELECT 'NOOP'; SELECT 'NOOP';
{{ end -}} {{ end -}}
@ -149,35 +115,28 @@ SELECT 'NOOP';
"GF_INSTALL_PLUGINS" = "grafana-clock-panel,grafana-piechart-panel,grafana-polystat-panel,natel-discrete-panel", "GF_INSTALL_PLUGINS" = "grafana-clock-panel,grafana-piechart-panel,grafana-polystat-panel,natel-discrete-panel",
} }
vault {
policies = [
"access-tables",
"nomad-task",
]
}
template { template {
data = <<EOF data = <<EOF
{{ with secret "kv/data/grafana" -}} {{ with nomadVar "nomad/jobs/grafana" -}}
GF_SECURITY_ADMIN_PASSWORD={{ .Data.data.admin_pw }} GF_SECURITY_ADMIN_PASSWORD={{ .admin_pw }}
GF_SMTP_USER={{ .Data.data.smtp_user }} GF_SMTP_USER={{ .smtp_user }}
GF_SMTP_PASSWORD={{ .Data.data.smtp_password }} GF_SMTP_PASSWORD={{ .smtp_password }}
GF_EXTERNAL_IMAGE_STORAGE_S3_ACCESS_KEY={{ .Data.data.minio_access_key }} GF_EXTERNAL_IMAGE_STORAGE_S3_ACCESS_KEY={{ .minio_access_key }}
GF_EXTERNAL_IMAGE_STORAGE_S3_SECRET_KEY={{ .Data.data.minio_secret_key }} GF_EXTERNAL_IMAGE_STORAGE_S3_SECRET_KEY={{ .minio_secret_key }}
GRAFANA_ALERT_EMAIL_ADDRESSES={{ .Data.data.alert_email_addresses }} GRAFANA_ALERT_EMAIL_ADDRESSES={{ .alert_email_addresses }}
{{ if .Data.data.db_name -}} {{ if .db_name -}}
# Database storage # Database storage
GF_DATABASE_TYPE=mysql GF_DATABASE_TYPE=mysql
GF_DATABASE_HOST={{ env "NOMAD_UPSTREAM_ADDR_mysql_server" }} {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
GF_DATABASE_NAME={{ .Data.data.db_name }} GF_DATABASE_HOST={{ .Address }}:{{ .Port }}
GF_DATABASE_USER={{ .Data.data.db_user }} {{- end }}
GF_DATABASE_PASSWORD={{ .Data.data.db_pass }} GF_DATABASE_NAME={{ .db_name }}
{{ end -}} GF_DATABASE_USER={{ .db_user }}
{{ end -}} GF_DATABASE_PASSWORD={{ .db_pass }}
{{ with secret "kv/data/slack" -}} {{- end }}
SLACK_BOT_URL={{ .Data.data.bot_url }} SLACK_BOT_URL={{ .slack_bot_url }}
SLACK_BOT_TOKEN={{ .Data.data.bot_token }} SLACK_BOT_TOKEN={{ .slack_bot_token }}
SLACK_HOOK_URL={{ .Data.data.hook_url }} SLACK_HOOK_URL={{ .slack_hook_url }}
{{ end -}} {{ end -}}
EOF EOF
env = true env = true

View File

@ -281,7 +281,7 @@ log_queries =
#################################### Auth Proxy ########################## #################################### Auth Proxy ##########################
[auth.proxy] [auth.proxy]
{{ with service "traefik" -}} {{ with nomadService "traefik" -}}
enabled = true enabled = true
header_name = X-WEBAUTH-USER header_name = X-WEBAUTH-USER
header_property = username header_property = username

View File

@ -2,9 +2,11 @@
apiVersion: 1 apiVersion: 1
datasources: datasources:
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "loki" -}}
- name: Loki - name: Loki
url: http://{{ env "NOMAD_UPSTREAM_ADDR_loki" }} url: http://{{ .Address }}:{{ .Port }}
type: loki type: loki
access: proxy access: proxy
isDefault: false isDefault: false
version: 1 version: 1
{{- end }}

View File

@ -2,16 +2,17 @@
apiVersion: 1 apiVersion: 1
datasources: datasources:
{{ with secret "kv/data/blocky" }}
- name: Blocky logs - name: Blocky logs
url: {{ env "NOMAD_UPSTREAM_ADDR_mysql_server" }} {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
database: {{ .Data.data.db_name }} url: {{ .Address }}:{{ .Port }}
{{- end }}
# TODO: Looking for an acl friendly way to expose this since it's a variable in blocky setup
database: blocky
type: mysql type: mysql
isDefault: false isDefault: false
version: 1 version: 1
{{ with secret "kv/data/grafana" }} {{ with nomadVar "nomad/jobs/grafana" -}}
user: {{ .Data.data.db_user_ro }} user: {{ .db_user_ro }}
secureJsonData: secureJsonData:
password: {{ .Data.data.db_pass_ro }} password: {{ .db_pass_ro }}
{{ end }} {{- end }}
{{ end -}}

View File

@ -2,9 +2,11 @@
apiVersion: 1 apiVersion: 1
datasources: datasources:
{{ range nomadService 1 (env "NOMAD_ALLOC_ID") "prometheus" -}}
- name: Prometheus - name: Prometheus
url: http://{{ env "NOMAD_UPSTREAM_ADDR_prometheus" }} url: http://{{ .Address }}:{{ .Port }}
type: prometheus type: prometheus
access: proxy access: proxy
isDefault: true isDefault: true
version: 1 version: 1
{{- end }}

View File

@ -1,5 +1,5 @@
--- ---
{{ with secret "kv/data/grafana" -}} {{ with nomadVar "nomad/jobs/grafana" -}}
notifiers: notifiers:
- name: Personal email - name: Personal email
type: email type: email
@ -7,5 +7,5 @@ notifiers:
org_id: 1 org_id: 1
is_default: false is_default: false
settings: settings:
addresses: "{{ .Data.data.alert_email_addresses }}" addresses: "{{ .alert_email_addresses }}"
{{ end -}} {{ end -}}

View File

@ -1,5 +1,5 @@
--- ---
{{ with secret "kv/data/slack" -}} {{ with nomadVar "nomad/jobs/grafana" -}}
notifiers: notifiers:
- name: Slack Bot - name: Slack Bot
type: slack type: slack
@ -7,11 +7,11 @@ notifiers:
org_id: 1 org_id: 1
is_default: false is_default: false
settings: settings:
url: "{{ .Data.data.bot_url }}" url: "{{ .slack_bot_url }}"
recipient: "#site-notifications" recipient: "#site-notifications"
username: Grafana Alerts username: Grafana Alerts
icon_url: https://grafana.iamthefij.com/public/img/grafana_icon.svg icon_url: https://grafana.iamthefij.com/public/img/grafana_icon.svg
token: "{{ .Data.data.bot_token }}" token: "{{ .slack_bot_token }}"
uploadImage: true uploadImage: true
mentionChannel: channel mentionChannel: channel
- name: Slack Hook - name: Slack Hook
@ -20,7 +20,7 @@ notifiers:
org_id: 1 org_id: 1
is_default: true is_default: true
settings: settings:
url: "{{ .Data.data.hook_url }}" url: "{{ .slack_hook_url }}"
icon_url: https://grafana.iamthefij.com/public/img/grafana_icon.svg icon_url: https://grafana.iamthefij.com/public/img/grafana_icon.svg
uploadImage: true uploadImage: true
mentionChannel: channel mentionChannel: channel

View File

@ -26,21 +26,21 @@ resource "nomad_job" "grafana" {
depends_on = [nomad_job.prometheus] depends_on = [nomad_job.prometheus]
} }
resource "consul_config_entry" "prometheus_intent" { # resource "consul_config_entry" "prometheus_intent" {
name = "prometheus" # name = "prometheus"
kind = "service-intentions" # kind = "service-intentions"
#
config_json = jsonencode({ # config_json = jsonencode({
Sources = [ # Sources = [
{ # {
Action = "allow" # Action = "allow"
Name = "grafana" # Name = "grafana"
Precedence = 9 # Precedence = 9
Type = "consul" # Type = "consul"
}, # },
] # ]
}) # })
} # }
# resource "consul_config_entry" "envoy_prometheus_bind" { # resource "consul_config_entry" "envoy_prometheus_bind" {
# name = "global" # name = "global"

View File

@ -8,7 +8,7 @@ job "prometheus" {
mode = "bridge" mode = "bridge"
port "web" { port "web" {
host_network = "loopback" host_network = "wgoverlay"
to = 9090 to = 9090
} }
} }
@ -20,31 +20,9 @@ job "prometheus" {
service { service {
name = "prometheus" name = "prometheus"
provider = "nomad"
port = "web" port = "web"
connect {
sidecar_service {
proxy {
local_service_port = 9090
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
}
}
}
check {
type = "http"
path = "/"
port = "web"
interval = "10s"
timeout = "10s"
}
// TODO: Remove traefik tags // TODO: Remove traefik tags
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
@ -110,6 +88,23 @@ scrape_configs:
replacement: $1:8500 replacement: $1:8500
target_label: __address__ target_label: __address__
- job_name: "nomad_services"
metrics_path: "/metrics"
nomad_sd_configs:
- server: "http://{{env "attr.unique.network.ip-address"}}:4646"
relabel_configs:
- source_labels: [__meta_nomad_tags]
regex: .*(prometheus.scrape).*
action: keep
- source_labels: [__meta_nomad_address]
target_label: __address__
- source_labels: [__meta_nomad_service]
target_label: nomad_service
- source_labels: [__meta_nomad_dc]
target_label: nomad_dc
- source_labels: [__meta_nomad_node_id]
target_label: nomad_node_id
- job_name: "exporters" - job_name: "exporters"
metrics_path: "/metrics" metrics_path: "/metrics"
consul_sd_configs: consul_sd_configs:

View File

@ -89,28 +89,8 @@ EOF
service { service {
name = "syslogng" name = "syslogng"
provider = "nomad"
port = "main" port = "main"
connect {
sidecar_service {
proxy {
local_service_port = 514
upstreams {
destination_name = "syslogng-promtail"
local_bind_port = 1000
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 20
memory_max = 50
}
}
}
} }
task "syslogng" { task "syslogng" {

View File

@ -18,26 +18,12 @@ job "redis" {
port "main" { port "main" {
to = 6379 to = 6379
} }
port "envoy_metrics" {
to = 9123
}
} }
service { service {
name = "redis" name = "redis"
provider = "nomad" provider = "nomad"
port = "main" port = "main"
# check {
# name = "alive"
# type = "tcp"
# interval = "10s"
# timeout = "2s"
# }
meta {
envoy_metrics_addr = "${NOMAD_ADDR_envoy_metrics}"
}
} }
task "redis" { task "redis" {

View File

@ -198,11 +198,14 @@ EOF
template { template {
data = <<EOF data = <<EOF
[client] [client]
host={{ env "NOMAD_UPSTREAM_IP_mysql_server" }} {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
port={{ env "NOMAD_UPSTREAM_PORT_mysql_server" }} host={{ .Address }}
port={{ .Port }}
{{ end -}}
user=root user=root
{{ with secret "kv/data/mysql" -}} # TODO: Use via lesser scoped access
password={{ .Data.data.root_password }} {{ with nomadVar "nomad/jobs" -}}
password={{ .mysql_root_password }}
{{ end -}} {{ end -}}
EOF EOF
destination = "${NOMAD_SECRETS_DIR}/my.cnf" destination = "${NOMAD_SECRETS_DIR}/my.cnf"
@ -210,13 +213,13 @@ password={{ .Data.data.root_password }}
template { template {
data = <<EOF data = <<EOF
{{ with secret "[[.vault_key]]" -}} {{ with nomadVar "[[.vault_key]]" -}}
CREATE DATABASE IF NOT EXISTS `{{ .Data.data.[[ default "db_name" .db_name_key ]] }}` CREATE DATABASE IF NOT EXISTS `{{ .[[ default "db_name" .db_name_key ]] }}`
CHARACTER SET = 'utf8mb4' CHARACTER SET = 'utf8mb4'
COLLATE = 'utf8mb4_unicode_ci'; COLLATE = 'utf8mb4_unicode_ci';
CREATE USER IF NOT EXISTS '{{ .Data.data.[[ default "db_user" .db_user_key ]] }}'@'%' CREATE USER IF NOT EXISTS '{{ .[[ default "db_user" .db_user_key ]] }}'@'%'
IDENTIFIED BY '{{ .Data.data.[[ default "db_pass" .db_pass_key ]] }}'; IDENTIFIED BY '{{ .[[ default "db_pass" .db_pass_key ]] }}';
GRANT ALL ON `{{ .Data.data.[[ default "db_name" .db_name_key ]] }}`.* to '{{ .Data.data.[[ default "db_user" .db_user_key ]] }}'@'%'; GRANT ALL ON `{{ .[[ default "db_name" .db_name_key ]] }}`.* to '{{ .[[ default "db_user" .db_user_key ]] }}'@'%';
{{ end -}} {{ end -}}
{{ end -}} {{ end -}}
EOF EOF

View File

@ -50,54 +50,12 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
service { service {
name = "backups" name = "backups"
provider = "nomad"
port = "metrics" port = "metrics"
# Add connect to mysql tags = [
connect { "prometheus.scrape"
sidecar_service { ]
proxy {
local_service_port = 8080
# TODO: Do I need this?
expose {
path {
path = "/metrics"
protocol = "http"
local_path_port = 8080
listener_port = "metrics"
}
}
upstreams {
destination_name = "mysql-server"
local_bind_port = 6060
}
config {
protocol = "tcp"
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
}
}
}
check {
port = "metrics"
type = "http"
path = "/health"
interval = "10s"
timeout = "3s"
}
meta {
metrics_addr = "$${NOMAD_ADDR_metrics}"
}
} }
task "backup" { task "backup" {
@ -122,16 +80,7 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
] ]
} }
vault {
policies = [
"access-tables",
"nomad-task",
]
}
env = { env = {
"MYSQL_HOST" = "$${NOMAD_UPSTREAM_IP_mysql_server}"
"MYSQL_PORT" = "$${NOMAD_UPSTREAM_PORT_mysql_server}"
"RCLONE_CHECKERS" = "2" "RCLONE_CHECKERS" = "2"
"RCLONE_TRANSFERS" = "2" "RCLONE_TRANSFERS" = "2"
"RCLONE_FTP_CONCURRENCY" = "5" "RCLONE_FTP_CONCURRENCY" = "5"
@ -139,15 +88,20 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
template { template {
data = <<EOF data = <<EOF
{{ with secret "kv/data/mysql" }} {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
MYSQL_HOST={{ .Address }}
MYSQL_PORT={{ .Port }}
{{- end }}
# TODO: Move this to new mysql root pass path
{{ with nomadVar "nomad/jobs" }}
MYSQL_USER=root MYSQL_USER=root
MYSQL_PASSWORD={{ .Data.data.root_password }} MYSQL_PASSWORD={{ .mysql_root_password }}
{{ end -}} {{ end -}}
{{ with secret "kv/data/backups" -}} {{ with nomadVar "nomad/jobs/backups" -}}
BACKUP_PASSPHRASE={{ .Data.data.backup_passphrase }} BACKUP_PASSPHRASE={{ .backup_passphrase }}
RCLONE_FTP_HOST={{ .Data.data.nas_ftp_host }} RCLONE_FTP_HOST={{ .nas_ftp_host }}
RCLONE_FTP_USER={{ .Data.data.nas_ftp_user }} RCLONE_FTP_USER={{ .nas_ftp_user }}
RCLONE_FTP_PASS={{ .Data.data.nas_ftp_pass | toJSON }} RCLONE_FTP_PASS={{ .nas_ftp_pass.Value | toJSON }}
RCLONE_FTP_EXPLICIT_TLS=true RCLONE_FTP_EXPLICIT_TLS=true
RCLONE_FTP_NO_CHECK_CERTIFICATE=true RCLONE_FTP_NO_CHECK_CERTIFICATE=true
{{ end -}} {{ end -}}

View File

@ -9,12 +9,9 @@ job "ipdvr" {
mode = "bridge" mode = "bridge"
port "main" { port "main" {
host_network = "loopback" host_network = "wgoverlay"
to = 8080 to = 8080
} }
port "envoy_metrics" {
to = 9123
}
} }
volume "sabnzbd-config" { volume "sabnzbd-config" {
@ -31,36 +28,13 @@ job "ipdvr" {
service { service {
name = "sabnzbd" name = "sabnzbd"
provider = "nomad"
port = "main" port = "main"
connect {
sidecar_service {
proxy {
local_service_port = 8080
config {
envoy_prometheus_bind_addr = "0.0.0.0:9123"
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
memory_max = 100
}
}
}
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.sabnzbd.entryPoints=websecure", "traefik.http.routers.sabnzbd.entryPoints=websecure",
] ]
meta {
envoy_metrics_addr = "${NOMAD_ADDR_envoy_metrics}"
}
} }
task "sabnzbd" { task "sabnzbd" {
@ -101,12 +75,9 @@ job "ipdvr" {
network { network {
mode = "bridge" mode = "bridge"
port "main" { port "main" {
host_network = "loopback" host_network = "wgoverlay"
to = 6789 to = 6789
} }
port "envoy_metrics" {
to = 9123
}
} }
volume "nzbget-config" { volume "nzbget-config" {
@ -123,44 +94,13 @@ job "ipdvr" {
service { service {
name = "nzbget" name = "nzbget"
provider = "nomad"
port = "main" port = "main"
connect {
sidecar_service {
proxy {
local_service_port = 6789
config {
envoy_prometheus_bind_addr = "0.0.0.0:9123"
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
memory_max = 100
}
}
}
# check {
# type = "http"
# path = "/"
# port = "main"
# interval = "10s"
# timeout = "10s"
# }
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.nzbget.entryPoints=websecure", "traefik.http.routers.nzbget.entryPoints=websecure",
] ]
meta {
envoy_metrics_addr = "${NOMAD_ADDR_envoy_metrics}"
}
} }
task "nzbget" { task "nzbget" {
@ -202,12 +142,9 @@ job "ipdvr" {
network { network {
mode = "bridge" mode = "bridge"
port "main" { port "main" {
host_network = "loopback" host_network = "wgoverlay"
to = 8989 to = 8989
} }
port "envoy_metrics" {
to = 9123
}
} }
volume "sonarr-data" { volume "sonarr-data" {
@ -224,53 +161,13 @@ job "ipdvr" {
service { service {
name = "sonarr" name = "sonarr"
provider = "nomad"
port = "main" port = "main"
connect {
sidecar_service {
proxy {
local_service_port = 8989
upstreams {
destination_name = "nzbget"
local_bind_port = 6789
}
upstreams {
destination_name = "sabnzbd"
local_bind_port = 8080
}
config {
envoy_prometheus_bind_addr = "0.0.0.0:9123"
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
memory_max = 100
}
}
}
# check {
# type = "http"
# path = "/"
# port = "main"
# interval = "10s"
# timeout = "10s"
# }
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.sonarr.entryPoints=websecure", "traefik.http.routers.sonarr.entryPoints=websecure",
] ]
meta {
envoy_metrics_addr = "${NOMAD_ADDR_envoy_metrics}"
}
} }
task "sonarr" { task "sonarr" {

View File

@ -27,41 +27,41 @@ resource "nomad_job" "ipdvr" {
jobspec = file("${path.module}/ip-dvr.nomad") jobspec = file("${path.module}/ip-dvr.nomad")
} }
resource "consul_config_entry" "sabnzbd_intents" { # resource "consul_config_entry" "sabnzbd_intents" {
depends_on = [nomad_job.ipdvr] # depends_on = [nomad_job.ipdvr]
#
name = "sabnzbd" # name = "sabnzbd"
kind = "service-intentions" # kind = "service-intentions"
#
config_json = jsonencode({ # config_json = jsonencode({
Sources = [ # Sources = [
{ # {
Action = "allow" # Action = "allow"
Name = "sonarr" # Name = "sonarr"
Precedence = 9 # Precedence = 9
Type = "consul" # Type = "consul"
}, # },
] # ]
}) # })
} # }
#
resource "consul_config_entry" "nzbget_intents" { # resource "consul_config_entry" "nzbget_intents" {
depends_on = [nomad_job.ipdvr] # depends_on = [nomad_job.ipdvr]
#
name = "nzbget" # name = "nzbget"
kind = "service-intentions" # kind = "service-intentions"
#
config_json = jsonencode({ # config_json = jsonencode({
Sources = [ # Sources = [
{ # {
Action = "allow" # Action = "allow"
Name = "sonarr" # Name = "sonarr"
Precedence = 9 # Precedence = 9
Type = "consul" # Type = "consul"
}, # },
] # ]
}) # })
} # }
module "media-library" { module "media-library" {
source = "./service" source = "./service"
@ -88,14 +88,12 @@ module "minitor" {
args = ["-metrics", "-config=$${NOMAD_TASK_DIR}/config.yml"] args = ["-metrics", "-config=$${NOMAD_TASK_DIR}/config.yml"]
service_port = 8080 service_port = 8080
metrics_port_name = "main" metrics_port_name = "main"
healthcheck_path = "/metrics"
use_vault = true
templates = [ templates = [
{ {
data = <<EOF data = <<EOF
{{ with secret "kv/data/mailgun_api" -}} {{ with nomadVar "nomad/jobs/minitor" -}}
MAILGUN_API_KEY={{ .Data.data.key }} MAILGUN_API_KEY={{ .mailgun_api_key }}
{{ end -}} {{ end -}}
EOF EOF
dest = "env" dest = "env"
@ -161,19 +159,20 @@ module "photoprism_module" {
}, },
] ]
mysql_bootstrap = { mysql_bootstrap = {
vault_key = "kv/data/photoprism" vault_key = "nomad/jobs/photoprism"
} }
templates = [ templates = [
{ {
data = <<EOF data = <<EOF
{{ with secret "kv/data/photoprism" -}} {{ with nomadVar "nomad/jobs/photoprism" -}}
PHOTOPRISM_ADMIN_USER={{ .Data.data.admin_user }} PHOTOPRISM_ADMIN_USER={{ .admin_user }}
PHOTOPRISM_ADMIN_PASSWORD={{ .Data.data.admin_password }} PHOTOPRISM_ADMIN_PASSWORD={{ .admin_password }}
PHOTOPRISM_DATABASE_DRIVER=mysql PHOTOPRISM_DATABASE_DRIVER=mysql
PHOTOPRISM_DATABASE_NAME={{ .Data.data.db_name }} PHOTOPRISM_DATABASE_NAME={{ .db_name }}
PHOTOPRISM_DATABASE_USER={{ .Data.data.db_user }} PHOTOPRISM_DATABASE_USER={{ .db_user }}
PHOTOPRISM_DATABASE_PASSWORD={{ .Data.data.db_pass }} PHOTOPRISM_DATABASE_PASSWORD={{ .db_pass }}
PHOTOPRISM_DATABASE_SERVER="{{ env "NOMAD_UPSTREAM_ADDR_mysql_server" }}" {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
PHOTOPRISM_DATABASE_SERVER="{{ .Address" }}:{{ .Port }}"
{{ end -}} {{ end -}}
EOF EOF
dest_prefix = "$${NOMAD_SECRETS_DIR}/" dest_prefix = "$${NOMAD_SECRETS_DIR}/"
@ -206,9 +205,9 @@ module "diun" {
templates = [ templates = [
{ {
data = <<EOF data = <<EOF
{{ with secret "kv/data/slack" -}} {{ with nomadVar "nomad/jobs/diun" -}}
DIUN_NOTIF_SLACK_WEBHOOKURL={{ .Data.data.hook_url }} DIUN_NOTIF_SLACK_WEBHOOKURL={{ .slack_hook_url }}
{{ end -}} {{- end }}
EOF EOF
dest_prefix = "$${NOMAD_SECRETS_DIR}" dest_prefix = "$${NOMAD_SECRETS_DIR}"
dest = "env" dest = "env"

View File

@ -8,7 +8,7 @@ job "multimedia" {
network { network {
mode = "bridge" mode = "bridge"
port "web" { port "web" {
host_network = "loopback" host_network = "wgoverlay"
to = 80 to = 80
} }
} }
@ -21,23 +21,9 @@ job "multimedia" {
service { service {
name = "library" name = "library"
provider = "nomad"
port = "web" port = "web"
connect {
sidecar_service {
proxy {
local_service_port = 80
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
}
}
}
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.library.entryPoints=websecure", "traefik.http.routers.library.entryPoints=websecure",

View File

@ -184,8 +184,8 @@ MYSQL_PASSWORD={{ .Data.data.db_pass }}
name = "nextcloud-backups" name = "nextcloud-backups"
port = "backup" port = "backup"
meta { tags = [
metrics_addr = "${NOMAD_ADDR_backup}" "prometheus.scrape",
} ]
} }

View File

@ -10,16 +10,11 @@ job "${name}" {
%{ if service_port != null ~} %{ if service_port != null ~}
port "main" { port "main" {
%{ if ingress } %{ if ingress }
host_network = "loopback" host_network = "wgoverlay"
%{~ endif } %{~ endif }
to = ${service_port} to = ${service_port}
} }
%{ endif } %{ endif }
%{ if connect }
port "envoy_metrics" {
to = 9123
}
%{~ endif }
} }
%{ if length(group_meta) > 0 } %{ if length(group_meta) > 0 }
@ -45,74 +40,14 @@ job "${name}" {
%{ if service_port != null ~} %{ if service_port != null ~}
service { service {
name = "${replace(name, "_", "-")}" name = "${replace(name, "_", "-")}"
provider = "nomad"
port = "main" port = "main"
%{ if connect }
connect {
sidecar_service {
proxy {
local_service_port = ${service_port}
%{ if use_mysql }
upstreams {
destination_name = "mysql-server"
local_bind_port = 4040
}
%{ endif ~}
%{ if use_redis }
upstreams {
destination_name = "redis"
local_bind_port = 6379
}
%{ endif ~}
%{ if use_ldap }
upstreams {
destination_name = "lldap"
local_bind_port = 3890
}
%{ endif ~}
%{ for upstream in upstreams ~}
upstreams {
destination_name = "${upstream.destination_name}"
local_bind_port = ${upstream.local_bind_port}
}
%{ endfor }
config {
envoy_prometheus_bind_addr = "0.0.0.0:9123"
}
}
}
sidecar_task {
resources {
cpu = 50
memory = 50
memory_max = 100
}
}
}
%{~ endif }
%{ if healthcheck_path != null }
check {
type = "http"
path = "${healthcheck_path}"
port = "main"
interval = "10s"
timeout = "10s"
}
%{~ endif }
meta {
%{ if metrics_port_name != null }
metrics_addr = "$${NOMAD_ADDR_${metrics_port_name}}"
%{ endif }
%{ if connect }
envoy_metrics_addr = "$${NOMAD_ADDR_envoy_metrics}"
%{~ endif }
}
tags = [ tags = [
# TODO: Rename metrics_port_name to something like "prometheus_scrape"
%{ if metrics_port_name != null }
"prometheus.scrape",
%{ endif }
%{ if ingress } %{ if ingress }
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.${name}.entryPoints=websecure", "traefik.http.routers.${name}.entryPoints=websecure",
@ -160,15 +95,6 @@ job "${name}" {
%{ endif ~} %{ endif ~}
%{ endfor ~} %{ endfor ~}
} }
%{ if use_vault ~}
vault {
policies = [
"access-tables",
"nomad-task",
]
}
%{ endif ~}
%{ if length(env) > 0 ~} %{ if length(env) > 0 ~}
env = { env = {
@ -236,11 +162,14 @@ EOF
template { template {
data = <<EOF data = <<EOF
[client] [client]
host={{ env "NOMAD_UPSTREAM_IP_mysql_server" }} {{ range nomadService 1 (env "NOMAD_ALLOC_ID") "mysql-server" -}}
port={{ env "NOMAD_UPSTREAM_PORT_mysql_server" }} host={{ .Address }}
port={{ .Port }}
{{ end -}}
user=root user=root
{{ with secret "kv/data/mysql" -}} # TODO: Use via lesser scoped access
password={{ .Data.data.root_password }} {{ with nomadVar "nomad/jobs" -}}
password={{ .mysql_root_password }}
{{ end -}} {{ end -}}
EOF EOF
destination = "$${NOMAD_SECRETS_DIR}/my.cnf" destination = "$${NOMAD_SECRETS_DIR}/my.cnf"
@ -248,14 +177,15 @@ password={{ .Data.data.root_password }}
template { template {
data = <<EOF data = <<EOF
{{ with secret "${mysql_bootstrap.vault_key}" -}} {{ with nomadVar "${mysql_bootstrap.vault_key}" -}}
CREATE DATABASE IF NOT EXISTS `{{ .Data.data.${mysql_bootstrap.db_name_key} }}` CREATE DATABASE IF NOT EXISTS `{{ .${mysql_bootstrap.db_name_key} }}`
CHARACTER SET = 'utf8mb4' CHARACTER SET = 'utf8mb4'
COLLATE = 'utf8mb4_unicode_ci'; COLLATE = 'utf8mb4_unicode_ci';
CREATE USER IF NOT EXISTS '{{ .Data.data.${mysql_bootstrap.db_user_key} }}'@'%' CREATE USER IF NOT EXISTS '{{ .${mysql_bootstrap.db_user_key} }}'@'%'
IDENTIFIED BY '{{ .Data.data.${mysql_bootstrap.db_pass_key} }}'; IDENTIFIED BY '{{ .${mysql_bootstrap.db_pass_key} }}';
GRANT ALL ON `{{ .Data.data.${mysql_bootstrap.db_name_key} }}`.* GRANT ALL ON `{{ .${mysql_bootstrap.db_name_key} }}`.*
TO '{{ .Data.data.${mysql_bootstrap.db_user_key} }}'@'%'; TO '{{ .${mysql_bootstrap.db_user_key} }}'@'%';
# TODO: Optionally grant ro access to ro user
{{ else -}} {{ else -}}
SELECT 'NOOP'; SELECT 'NOOP';
{{ end -}} {{ end -}}

View File

@ -58,38 +58,16 @@ job "whoami" {
network { network {
mode = "bridge" mode = "bridge"
port "web" { port "web" {
host_network = "loopback" host_network = "wgoverlay"
to = 80 to = 80
} }
} }
service { service {
name = "whoami" name = "whoami"
provider = "nomad"
port = "web" port = "web"
connect {
sidecar_service {
proxy {
local_service_port = 80
}
}
sidecar_task {
resources {
cpu = 50
memory = 20
}
}
}
check {
type = "http"
path = "/health"
port = "web"
interval = "10s"
timeout = "10s"
}
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.whoami.entryPoints=websecure", "traefik.http.routers.whoami.entryPoints=websecure",