Refactor use of wesher to be behind a variable toggle
Occasionally I run into issues with Wesher, so this change makes it easy to disable Wesher entirely by setting TF_VAR_use_wesher to false.
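For reference, a minimal sketch of flipping the toggle for a single run (standard Terraform variable handling; the exact workflow is not part of this commit):

    # Disable Wesher host networks for this run
    export TF_VAR_use_wesher=false
    terraform apply

Setting use_wesher = false in a terraform.tfvars file works as well; the default remains true.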
This commit is contained in:
parent e2c35a82a9 · commit d5078b24da
@@ -1,3 +1,5 @@
 module "backups" {
   source = "./backups"
+
+  use_wesher = var.use_wesher
 }
@@ -31,7 +31,9 @@ job "backup%{ if batch_node != null }-oneoff-${batch_node}%{ endif }" {
      mode = "bridge"

      port "metrics" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 8080
      }
    }
@@ -6,6 +6,7 @@ resource "nomad_job" "backup" {
   jobspec = templatefile("${path.module}/backup.nomad", {
     module_path = path.module,
     batch_node  = null,
+    use_wesher  = var.use_wesher
   })
 }

@@ -24,5 +25,6 @@ resource "nomad_job" "backup-oneoff" {
   jobspec = templatefile("${path.module}/backup.nomad", {
     module_path = path.module,
     batch_node  = each.key,
+    use_wesher  = var.use_wesher
   })
 }
backups/vars.tf (new file, +5)
@@ -0,0 +1,5 @@
+variable "use_wesher" {
+  type        = bool
+  description = "Indicates whether or not services should expose themselves on the wesher network"
+  default     = true
+}
core.tf (1 line changed)
@@ -6,6 +6,7 @@ module "core" {
   source = "./core"

   base_hostname = var.base_hostname
+  use_wesher    = var.use_wesher

   # Metrics and Blocky depend on databases
   depends_on = [module.databases]
@@ -9,6 +9,7 @@ module "authelia" {
   ingress             = true
   service_port        = 9999
   service_port_static = true
+  use_wesher          = var.use_wesher
   # metrics_port = 9959

   env = {
@@ -24,7 +24,9 @@ job "blocky" {
      }

      port "api" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = "4000"
      }

@@ -66,7 +68,7 @@ job "blocky" {

      config {
        image = "ghcr.io/0xerr0r/blocky"
-       args  = ["-c", "${NOMAD_TASK_DIR}/config.yml"]
+       args  = ["-c", "$${NOMAD_TASK_DIR}/config.yml"]
        ports = ["dns", "api"]
      }

@@ -78,7 +80,7 @@ job "blocky" {

      template {
        data        = var.config_data
-       destination = "${NOMAD_TASK_DIR}/config.yml"
+       destination = "$${NOMAD_TASK_DIR}/config.yml"
        splay       = "1m"

        wait {
@@ -95,7 +97,7 @@ job "blocky" {
      {{- end }}
      {{- end }}
      EOF
-       destination = "${NOMAD_TASK_DIR}/nomad.hosts"
+       destination = "$${NOMAD_TASK_DIR}/nomad.hosts"
        change_mode = "noop"

        wait {
@@ -116,7 +118,7 @@ job "blocky" {
      config {
        image = "alpine:3.17"
        ports = ["tls"]
-       args  = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
+       args  = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
      }

      resources {
@@ -130,7 +132,7 @@ set -e
 apk add stunnel
 exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
 EOF
-       destination = "${NOMAD_TASK_DIR}/start.sh"
+       destination = "$${NOMAD_TASK_DIR}/start.sh"
      }

      template {
@@ -155,7 +157,7 @@ connect = {{ .Address }}:{{ .Port }}
 {{- end }}
 PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/stunnel_psk.txt
 EOF
-       destination = "${NOMAD_TASK_DIR}/stunnel.conf"
+       destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
      }

      template {
@@ -169,7 +171,7 @@ EOF
        data = <<EOF
 {{- with nomadVar "nomad/jobs/blocky/blocky/stunnel" -}}{{ .redis_stunnel_psk }}{{ end -}}
 EOF
-       destination = "${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
+       destination = "$${NOMAD_SECRETS_DIR}/stunnel_psk.txt"
      }
    }

@@ -1,14 +1,8 @@
-variable "base_hostname" {
-  type        = string
-  description = "Base hostname to serve content from"
-  default     = "dev.homelab"
-}
-
 locals {
   config_data = templatefile(
     "${path.module}/config.yml",
     {
-      "base_hostname" = var.base_hostname,
+      base_hostname = var.base_hostname,
     }
   )
 }
@@ -21,5 +15,7 @@ resource "nomad_job" "blocky" {
    }
  }

-  jobspec = file("${path.module}/blocky.nomad")
+  jobspec = templatefile("${path.module}/blocky.nomad", {
+    use_wesher = var.use_wesher,
+  })
 }
core/blocky/vars.tf (new file, +11)
@@ -0,0 +1,11 @@
+variable "base_hostname" {
+  type        = string
+  description = "Base hostname to serve content from"
+  default     = "dev.homelab"
+}
+
+variable "use_wesher" {
+  type        = bool
+  description = "Indicates whether or not services should expose themselves on the wesher network"
+  default     = true
+}
@@ -9,11 +9,15 @@ job "lldap" {
      mode = "bridge"

      port "web" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
      }

      port "ldap" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
      }

      port "tls" {}
@@ -48,13 +52,13 @@ job "lldap" {
      config {
        image = "nitnelave/lldap:latest"
        ports = ["ldap", "web"]
-       args  = ["run", "--config-file", "${NOMAD_SECRETS_DIR}/lldap_config.toml"]
+       args  = ["run", "--config-file", "$${NOMAD_SECRETS_DIR}/lldap_config.toml"]
      }

      env = {
        "LLDAP_VERBOSE"   = "true"
-       "LLDAP_LDAP_PORT" = "${NOMAD_PORT_ldap}"
-       "LLDAP_HTTP_PORT" = "${NOMAD_PORT_web}"
+       "LLDAP_LDAP_PORT" = "$${NOMAD_PORT_ldap}"
+       "LLDAP_HTTP_PORT" = "$${NOMAD_PORT_web}"
      }

      template {
@@ -86,7 +90,7 @@ user = "{{ .smtp_user }}"
 password = "{{ .smtp_password }}"
 {{ end -}}
 EOH
-       destination = "${NOMAD_SECRETS_DIR}/lldap_config.toml"
+       destination = "$${NOMAD_SECRETS_DIR}/lldap_config.toml"
        change_mode = "restart"
      }

@@ -112,7 +116,7 @@ password = "{{ .smtp_password }}"
        "2m",
        "/bin/bash",
        "-c",
-       "until /usr/bin/mysql --defaults-extra-file=${NOMAD_SECRETS_DIR}/my.cnf < ${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
+       "until /usr/bin/mysql --defaults-extra-file=$${NOMAD_SECRETS_DIR}/my.cnf < $${NOMAD_SECRETS_DIR}/bootstrap.sql; do sleep 10; done",
      ]
    }

@@ -127,7 +131,7 @@ user=root
 password={{ .mysql_root_password }}
 {{ end -}}
 EOF
-       destination = "${NOMAD_SECRETS_DIR}/my.cnf"
+       destination = "$${NOMAD_SECRETS_DIR}/my.cnf"
      }

      template {
@@ -146,7 +150,7 @@ GRANT ALL ON `{{ .db_name }}`.*
 SELECT 'NOOP';
 {{ end -}}
 EOF
-       destination = "${NOMAD_SECRETS_DIR}/bootstrap.sql"
+       destination = "$${NOMAD_SECRETS_DIR}/bootstrap.sql"
      }

      resources {
@@ -166,7 +170,7 @@ SELECT 'NOOP';
      config {
        image = "alpine:3.17"
        ports = ["tls"]
-       args  = ["/bin/sh", "${NOMAD_TASK_DIR}/start.sh"]
+       args  = ["/bin/sh", "$${NOMAD_TASK_DIR}/start.sh"]
      }

      resources {
@@ -180,7 +184,7 @@ set -e
 apk add stunnel
 exec stunnel {{ env "NOMAD_TASK_DIR" }}/stunnel.conf
 EOF
-       destination = "${NOMAD_TASK_DIR}/start.sh"
+       destination = "$${NOMAD_TASK_DIR}/start.sh"
      }

      template {
@@ -203,7 +207,7 @@ connect = {{ .Address }}:{{ .Port }}
 {{- end }}
 PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
 EOF
-       destination = "${NOMAD_TASK_DIR}/stunnel.conf"
+       destination = "$${NOMAD_TASK_DIR}/stunnel.conf"
      }

      template {
@@ -212,14 +216,14 @@ PSKsecrets = {{ env "NOMAD_SECRETS_DIR" }}/mysql_stunnel_psk.txt
 {{ .allowed_psks }}
 {{- end }}
 EOF
-       destination = "${NOMAD_TASK_DIR}/stunnel_psk.txt"
+       destination = "$${NOMAD_TASK_DIR}/stunnel_psk.txt"
      }

      template {
        data = <<EOF
 {{- with nomadVar "nomad/jobs/lldap/lldap/stunnel" }}{{ .mysql_stunnel_psk }}{{ end -}}
 EOF
-       destination = "${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
+       destination = "$${NOMAD_SECRETS_DIR}/mysql_stunnel_psk.txt"
      }

    }

core/loki.tf (11 lines changed)
@@ -1,12 +1,15 @@
 module "loki" {
   source = "../services/service"
+
-  name = "loki"
-  image = "grafana/loki:2.2.1"
-  args = ["--config.file=$${NOMAD_TASK_DIR}/loki-config.yml"]
+  name  = "loki"
+  image = "grafana/loki:2.2.1"
+  args  = ["--config.file=$${NOMAD_TASK_DIR}/loki-config.yml"]
+
   service_port = 3100
   ingress      = true
+  sticky_disk  = true
+  use_wesher   = var.use_wesher

-  sticky_disk = true
   # healthcheck = "/ready"
   templates = [
     {
core/main.tf (10 lines changed)
@@ -2,6 +2,8 @@ module "blocky" {
   source = "./blocky"

   base_hostname = var.base_hostname
+  use_wesher    = var.use_wesher
+
   # Not in this module
   # depends_on = [module.databases]
 }
@@ -13,7 +15,9 @@ module "traefik" {
 }

 module "metrics" {
-  source = "./metrics"
+  source     = "./metrics"
+  use_wesher = var.use_wesher
+
   # Not in this module
   # depends_on = [module.databases]
 }
@@ -32,5 +36,7 @@ resource "nomad_job" "ddclient" {
 }

 resource "nomad_job" "lldap" {
-  jobspec = file("${path.module}/lldap.nomad")
+  jobspec = templatefile("${path.module}/lldap.nomad", {
+    use_wesher = var.use_wesher,
+  })
 }
@@ -8,7 +8,9 @@ job "exporters" {
      mode = "bridge"

      port "promtail" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 9080
      }
    }
@@ -19,8 +21,8 @@ job "exporters" {
      port = "promtail"

      meta {
-       nomad_dc = "${NOMAD_DC}"
-       nomad_node_name = "${node.unique.name}"
+       nomad_dc = "$${NOMAD_DC}"
+       nomad_node_name = "$${node.unique.name}"
      }

      tags = [
@@ -39,7 +41,7 @@ job "exporters" {

      config {
        image = "grafana/promtail:2.7.1"
-       args  = ["-config.file=${NOMAD_TASK_DIR}/promtail.yml"]
+       args  = ["-config.file=$${NOMAD_TASK_DIR}/promtail.yml"]
        ports = ["promtail"]

        # Bind mount host machine-id and log directories
@@ -127,7 +129,7 @@ scrape_configs:
        - source_labels: ['__journal_com_hashicorp_nomad_task_name']
          target_label: nomad_task_name
 EOF
-       destination = "${NOMAD_TASK_DIR}/promtail.yml"
+       destination = "$${NOMAD_TASK_DIR}/promtail.yml"
      }

      resources {
@@ -8,7 +8,9 @@ job "grafana" {
      mode = "bridge"

      port "web" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 3000
      }
    }
@@ -3,7 +3,9 @@ resource "nomad_job" "exporters" {
     enabled = true
   }

-  jobspec = file("${path.module}/exporters.nomad")
+  jobspec = templatefile("${path.module}/exporters.nomad", {
+    use_wesher = var.use_wesher,
+  })
 }

 resource "nomad_job" "prometheus" {
@@ -11,7 +13,9 @@ resource "nomad_job" "prometheus" {
     enabled = true
   }

-  jobspec = file("${path.module}/prometheus.nomad")
+  jobspec = templatefile("${path.module}/prometheus.nomad", {
+    use_wesher = var.use_wesher,
+  })
 }

 resource "nomad_job" "grafana" {
@@ -21,6 +25,7 @@ resource "nomad_job" "grafana" {

   jobspec = templatefile("${path.module}/grafana.nomad", {
     module_path = path.module
+    use_wesher  = var.use_wesher
   })

   depends_on = [nomad_job.prometheus]
@@ -8,12 +8,16 @@ job "prometheus" {
      mode = "bridge"

      port "web" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 9090
      }

      port "pushgateway" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        static = 9091
      }
    }
@@ -48,8 +52,8 @@ job "prometheus" {
        image = "prom/prometheus:v2.43.0"
        ports = ["web"]
        args = [
-         "--config.file=${NOMAD_TASK_DIR}/prometheus.yml",
-         "--storage.tsdb.path=${NOMAD_ALLOC_DIR}/data/tsdb",
+         "--config.file=$${NOMAD_TASK_DIR}/prometheus.yml",
+         "--storage.tsdb.path=$${NOMAD_ALLOC_DIR}/data/tsdb",
          "--web.listen-address=0.0.0.0:9090",
          "--web.console.libraries=/usr/share/prometheus/console_libraries",
          "--web.console.templates=/usr/share/prometheus/consoles",
@@ -112,7 +116,7 @@ scrape_configs:
 EOF
        change_mode   = "signal"
        change_signal = "SIGHUP"
-       destination   = "${NOMAD_TASK_DIR}/prometheus.yml"
+       destination   = "$${NOMAD_TASK_DIR}/prometheus.yml"
      }

      resources {
@@ -128,7 +132,7 @@ scrape_configs:
        image = "prom/pushgateway"
        ports = ["pushgateway"]
        args = [
-         "--persistence.file=${NOMAD_ALLOC_DIR}/pushgateway-persistence",
+         "--persistence.file=$${NOMAD_ALLOC_DIR}/pushgateway-persistence",
        ]
      }

core/metrics/vars.tf (new file, +5)
@@ -0,0 +1,5 @@
+variable "use_wesher" {
+  type        = bool
+  description = "Indicates whether or not services should expose themselves on the wesher network"
+  default     = true
+}
@@ -3,3 +3,9 @@ variable "base_hostname" {
   description = "Base hostname to serve content from"
   default     = "dev.homelab"
 }
+
+variable "use_wesher" {
+  type        = bool
+  description = "Indicates whether or not services should expose themselves on the wesher network"
+  default     = true
+}
@@ -1,5 +1,7 @@
 module "services" {
   source = "./services"

+  use_wesher = var.use_wesher
+
   depends_on = [module.databases, module.core]
 }
@@ -6,6 +6,7 @@ module "adminer" {

   ingress      = true
   service_port = 8080
+  use_wesher   = var.use_wesher

   use_mysql    = true
   use_postgres = true
@@ -11,6 +11,7 @@ module "bazarr" {

   ingress      = true
   service_port = 6767
+  use_wesher   = var.use_wesher

   use_postgres = true
   postgres_bootstrap = {
@@ -9,7 +9,9 @@ job "ipdvr" {
      mode = "bridge"

      port "main" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 8080
      }
    }
@@ -75,7 +77,9 @@ job "ipdvr" {
    network {
      mode = "bridge"
      port "main" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        static = 6789
      }
    }
@@ -142,7 +146,9 @@ job "ipdvr" {
    network {
      mode = "bridge"
      port "main" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 8989
      }
    }
@@ -6,6 +6,7 @@ module "lidarr" {

   ingress      = true
   service_port = 8686
+  use_wesher   = var.use_wesher

   use_postgres = true
   postgres_bootstrap = {
@@ -1,7 +1,5 @@
-module "backups" {
-  source = "./backups"
-}
-
 resource "nomad_job" "ipdvr" {
-  jobspec = file("${path.module}/ip-dvr.nomad")
+  jobspec = templatefile("${path.module}/ip-dvr.nomad", {
+    use_wesher = var.use_wesher,
+  })
 }
@@ -6,6 +6,8 @@ module "media-library" {
   args         = ["caddy", "file-server", "--root", "/mnt/media", "--browse"]
   ingress      = true
   service_port = 80
+  use_wesher   = var.use_wesher
+
   host_volumes = [
     {
       name = "media-read"
@@ -5,6 +5,7 @@ module "minitor" {
   image        = "iamthefij/minitor-go:1.4.1"
   args         = ["-metrics", "-config=$${NOMAD_TASK_DIR}/config.yml"]
   service_port = 8080
+  use_wesher   = var.use_wesher
   prometheus   = true

   env = {
@@ -39,6 +39,7 @@ module "photoprism_module" {

   ingress      = true
   service_port = 2342
+  use_wesher   = var.use_wesher
   ingress_middlewares = [
     "authelia@nomad"
   ]
@@ -20,6 +20,7 @@ resource "nomad_job" "service" {
     stunnel_resources = var.stunnel_resources
     service_tags      = var.service_tags
     custom_services   = var.custom_services
+    use_wesher        = var.use_wesher

     ingress      = var.ingress
     ingress_rule = var.ingress_rule
@@ -12,7 +12,9 @@ job "${name}" {
      mode = "bridge"
      %{ if service_port != null ~}
      port "main" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
      %{ if service_port_static ~}
        static = ${service_port}
      %{ else ~}
@@ -239,3 +239,9 @@ variable "custom_services" {

   default = []
 }
+
+variable "use_wesher" {
+  type        = bool
+  description = "Indicates whether or not services should expose themselves on the wesher network"
+  default     = true
+}
services/vars.tf (new file, +5)
@@ -0,0 +1,5 @@
+variable "use_wesher" {
+  type        = bool
+  description = "Indicates whether or not services should expose themselves on the wesher network"
+  default     = true
+}
@@ -15,7 +15,9 @@ job "whoami" {
    network {
      mode = "bridge"
      port "web" {
+       %{~ if use_wesher ~}
        host_network = "wesher"
+       %{~ endif ~}
        to = 80
      }
    }
@@ -45,7 +47,7 @@ job "whoami" {
      config {
        image = "containous/whoami:latest"
        ports = ["web"]
-       args  = ["--port", "${NOMAD_PORT_web}"]
+       args  = ["--port", "$${NOMAD_PORT_web}"]
      }

      resources {
@@ -6,5 +6,7 @@ resource "nomad_job" "whoami" {
    }
  }

-  jobspec = file("${path.module}/whoami.nomad")
+  jobspec = templatefile("${path.module}/whoami.nomad", {
+    use_wesher = var.use_wesher
+  })
 }