Clean up Grafana and Loki bootstraps

IamTheFij 2022-07-25 15:49:29 -07:00
parent 349f7b930b
commit 11e89de947
11 changed files with 161 additions and 72 deletions

core.tf

@@ -32,6 +32,7 @@ module "loki" {
service_port = 3100
ingress = true
sticky_disk = true
healthcheck = "/ready"
templates = jsonencode([
{
data = file("./loki-config.yml")
@@ -41,6 +42,34 @@ module "loki" {
}
}
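# Allow Grafana, Promtail, and syslogng-promtail to reach Loki over Consul Connect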
resource "consul_config_entry" "loki_intent" {
name = "loki"
kind = "service-intentions"
config_json = jsonencode({
Sources = [
{
Action = "allow"
Name = "grafana"
Precedence = 9
Type = "consul"
},
{
Action = "allow"
Name = "promtail"
Precedence = 9
Type = "consul"
},
{
Action = "allow"
Name = "syslogng-promtail"
Precedence = 9
Type = "consul"
},
]
})
}
resource "nomad_job" "syslog-ng" {
hcl2 {
enabled = true
@@ -48,3 +77,19 @@ resource "nomad_job" "syslog-ng" {
jobspec = file("${path.module}/syslogng.nomad")
}
resource "consul_config_entry" "syslogng_promtail_intent" {
name = "syslogng-promtail"
kind = "service-intentions"
config_json = jsonencode({
Sources = [
{
Action = "allow"
Name = "syslogng"
Precedence = 9
Type = "consul"
},
]
})
}


@@ -5,6 +5,7 @@ provider "registry.terraform.io/hashicorp/consul" {
version = "2.15.0"
hashes = [
"h1:o+Su3YqeOkHgf86GEArIVDZfaZQphYFjAOwpi/b0bzs=",
"h1:tAb2gwW+oZ8/t2j7lExdqpNrxmaWsHbyA2crFWClPb0=",
"zh:0bd2a9873099d89bd52e9eee623dd20ccb275d1e2f750da229a53a4d5b23450c",
"zh:1c9f87d4d97b2c61d006c0bef159d61d2a661a103025f8276ebbeb000129f931",
"zh:25b73a34115255c464be10a53f2510c4a1db958a71be31974d30654d5472e624",
@@ -22,6 +23,7 @@ provider "registry.terraform.io/hashicorp/consul" {
provider "registry.terraform.io/hashicorp/nomad" {
version = "1.4.16"
hashes = [
"h1:PQxNPNmMVOErxryTWIJwr22k95DTSODmgRylqjc2TjI=",
"h1:tyfjD/maKzb0RxxD9KWgLnkJu9lnYziYsQgGw85Giz8=",
"zh:0d4fbb7030d9caac3b123e60afa44f50c83cc2a983e1866aec7f30414abe7b0e",
"zh:0db080228e07c72d6d8ca8c45249d6f97cd0189fce82a77abbdcd49a52e57572",


@@ -2,32 +2,19 @@ job "metrics" {
datacenters = ["dc1"]
type = "system"
group "exporters" {
group "cadvisor" {
network {
mode = "bridge"
port "cadvisor" {
host_network = "nomad-bridge"
to = 8080
}
port "node_exporter" {
host_network = "nomad-bridge"
to = 9100
}
port "promtail" {
host_network = "nomad-bridge"
to = 9080
}
port "expose" {
host_network = "nomad-bridge"
}
port "cadvisor_envoy_metrics" {
host_network = "nomad-bridge"
to = 9102
}
}
@@ -148,6 +135,18 @@ job "metrics" {
}
}
}
group "node_exporter" {
network {
mode = "bridge"
port "node_exporter" {
to = 9100
}
}
service {
name = "nodeexporter"
port = "node_exporter"
@@ -162,6 +161,15 @@ job "metrics" {
sidecar_service {
proxy {
local_service_port = 9100
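# Expose /metrics through the sidecar proxy (plain HTTP) on the node_exporter port so it can be scraped outside the mesh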
expose {
path {
path = "/metrics"
protocol = "http"
local_path_port = 9100
listener_port = "node_exporter"
}
}
}
}
@@ -205,6 +213,18 @@ job "metrics" {
}
}
}
group "promtail" {
network {
mode = "bridge"
port "promtail" {
to = 9080
}
}
service {
name = "promtail"
port = "promtail"
@@ -219,6 +239,11 @@ job "metrics" {
sidecar_service {
proxy {
local_service_port = 9080
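# Bind the Loki upstream locally so Promtail can push logs through the Connect proxy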
upstreams {
destination_name = "loki"
local_bind_port = 1000
}
}
}
@@ -255,12 +280,6 @@ job "metrics" {
}
# Bind mount host machine-id and log directories
mount {
type = "bind"
source = "/etc/machine-id"
target = "/etc/machine-id"
readonly = true
}
mount {
type = "bind"
@@ -283,12 +302,12 @@ job "metrics" {
readonly = true
}
mount {
type = "bind"
source = "/var/log/audit"
target = "/var/log/audit"
readonly = true
}
# mount {
# type = "bind"
# source = "/var/log/audit"
# target = "/var/log/audit"
# readonly = true
# }
}
template {
@@ -299,7 +318,8 @@ server:
http_listen_port: 9080
clients:
- url: http://${NOMAD_UPSTREAM_ADDR_loki}/loki/api/v1/push
# loki upstream: {{ env "NOMAD_UPSTREAM_ADDR_loki" }}
- url: http://{{ env "NOMAD_UPSTREAM_ADDR_loki" }}/loki/api/v1/push
scrape_configs:


@@ -33,6 +33,11 @@ job "grafana" {
destination_name = "prometheus"
local_bind_port = 9090
}
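# Loki upstream for the Grafana Loki datasource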
upstreams {
destination_name = "loki"
local_bind_port = 3100
}
}
}
@@ -72,10 +77,36 @@ job "grafana" {
}
env = {
"GF_SECURITY_ADMIN_PASSWORD" = "password",
"GF_INSTALL_PLUGINS" = "grafana-clock-panel,grafana-piechart-panel,grafana-polystat-panel",
}
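# Pull Grafana and Slack secrets from Vault and render them as environment variables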
vault {
policies = [
"access-tables",
"nomad-task",
]
}
template {
data = <<EOF
{{ with secret "kv/data/grafana" }}
GF_SECURITY_ADMIN_PASSWORD={{ .Data.data.admin_pw }}
GF_SMTP_USER={{ .Data.data.smtp_user }}
GF_SMTP_PASSWORD={{ .Data.data.smtp_password }}
GF_EXTERNAL_IMAGE_STORAGE_S3_ACCESS_KEY={{ .Data.data.minio_access_key }}
GF_EXTERNAL_IMAGE_STORAGE_S3_SECRET_KEY={{ .Data.data.minio_secret_key }}
GRAFANA_ALERT_EMAIL_ADDRESSES={{ .Data.data.alert_email_addresses }}
{{ end }}
{{ with secret "kv/data/slack" }}
SLACK_BOT_URL={{ .Data.data.bot_url }}
SLACK_BOT_TOKEN={{ .Data.data.bot_token }}
SLACK_HOOK_URL={{ .Data.data.hook_url }}
{{ end }}
EOF
env = true
destination = "secrets/conf.env"
}
%{ for config_file in fileset(join("/", [module_path, "grafana"]), "**") ~}
template {
data = <<EOF
@@ -85,7 +116,7 @@ ${file(join("/", [module_path, "grafana", config_file]))}
change_signal = "SIGHUP"
destination = "local/config/${config_file}"
# Change template delimiters for dashboard files that use JSON and have double curly braces and square brackets
%{ if length(regexall(".*/dashboard/.*", config_file)) > 0 ~}
%{ if length(regexall("dashboard", config_file)) > 0 ~}
left_delimiter = "<<<<"
right_delimiter = ">>>>"
%{ endif ~}


@@ -281,11 +281,18 @@ log_queries =
#################################### Auth Proxy ##########################
[auth.proxy]
{{ with service "traefik" -}}
enabled = true
header_name = X-WEBAUTH-USER
header_property = username
auto_sign_up = true
whitelist = 192.168.2.20
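# Build a comma-separated whitelist from the addresses of every registered traefik instance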
{{- $last := len . | subtract 1 -}}
{{- $services := . -}}
whitelist = {{ range $i := loop $last -}}
{{- with index $services $i }}{{ .Address }},{{ end -}}
{{- end -}}
{{- with index . $last }}{{ .Address }}{{ end -}}
{{- end }}
#################################### Basic Auth ##########################
[auth.basic]


@@ -3,7 +3,7 @@ apiVersion: 1
datasources:
- name: Loki
url: http://loki:3100
url: http://{{ env "NOMAD_UPSTREAM_ADDR_loki" }}
type: loki
access: proxy
isDefault: false


@@ -3,7 +3,7 @@ apiVersion: 1
datasources:
- name: Prometheus
url: http://prom:9090
url: http://{{ env "NOMAD_UPSTREAM_ADDR_prometheus" }}
type: prometheus
access: proxy
isDefault: true


@@ -1,15 +1,6 @@
---
notifiers:
# - name: Telegram
# type: telegram
# uid: telegram-1
# org_id: 1
# is_default: false
# settings:
# chatid: ${TELEGRAM_CHATID}
# bottoken: ${TELEGRAM_BOTTOKEN}
# uploadImage: true
{{ with secret "kv/data/grafana" -}}
notifiers:
- name: Personal email
type: email
uid: email-1


@@ -3,13 +3,13 @@ variable "consul_address" {
description = "address of consul server for dynamic scraping"
}
# resource "nomad_job" "exporters" {
# hcl2 {
# enabled = true
# }
#
# jobspec = file("${path.module}/exporters.nomad")
# }
resource "nomad_job" "exporters" {
hcl2 {
enabled = true
}
jobspec = file("${path.module}/exporters.nomad")
}
data "consul_nodes" "all-nodes" {
query_options {


@@ -97,8 +97,8 @@ scrape_configs:
format:
- "prometheus"
consul_sd_configs:
# - server: "${var.consul_address}"
- server: "${CONSUL_HTTP_ADDR}"
- server: "${var.consul_address}"
# - server: "{{ env "CONSUL_HTTP_ADDR" }}"
services:
- "nomad"
tags:
@@ -110,8 +110,8 @@ scrape_configs:
format:
- "prometheus"
consul_sd_configs:
# - server: "${var.consul_address}"
- server: "${CONSUL_HTTP_ADDR}"
- server: "${var.consul_address}"
# - server: "{{ env "CONSUL_HTTP_ADDR" }}"
services:
- "nomad-client"
@@ -121,8 +121,8 @@ scrape_configs:
format:
- "prometheus"
consul_sd_configs:
# - server: "${var.consul_address}"
- server: "${CONSUL_HTTP_ADDR}"
- server: "${var.consul_address}"
# - server: "{{ env "CONSUL_HTTP_ADDR" }}"
services:
- "consul"
relabel_configs:
@@ -133,8 +133,8 @@ scrape_configs:
- job_name: "exporters"
metrics_path: "/metrics"
consul_sd_configs:
# - server: "${var.consul_address}"
- server: "${CONSUL_HTTP_ADDR}"
- server: "${var.consul_address}"
# - server: "{{ env "CONSUL_HTTP_ADDR" }}"
relabel_configs:
- source_labels: [__meta_consul_service]
action: drop
@@ -156,8 +156,8 @@ scrape_configs:
- job_name: "envoy"
metrics_path: "/metrics"
consul_sd_configs:
# - server: "${var.consul_address}"
- server: "${CONSUL_HTTP_ADDR}"
- server: "${var.consul_address}"
# - server: "{{ env "CONSUL_HTTP_ADDR" }}"
relabel_configs:
- source_labels: [__meta_consul_service]
action: keep


@@ -23,8 +23,9 @@ job "syslogng" {
connect {
sidecar_service {
local_service_port = 1514
proxy {
local_service_port = 1514
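# Loki upstream used by the Promtail syslog receiver to push logs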
upstreams {
destination_name = "loki"
local_bind_port = 1000
@@ -64,13 +65,13 @@ server:
http_listen_port: 9080
clients:
- url: http://${NOMAD_UPSTREAM_ADDR_loki}/loki/api/v1/push
- url: http://{{ env "NOMAD_UPSTREAM_ADDR_loki" }}/loki/api/v1/push
scrape_configs:
# TCP syslog receiver
- job_name: syslog
syslog:
listen_address: 0.0.0.0:${NOMAD_PORT_main}
listen_address: 0.0.0.0:{{ env "NOMAD_PORT_main" }}
labels:
job: syslog
relabel_configs:
@@ -125,21 +126,13 @@ EOF
}
}
}
check {
type = "tcp"
port = "main"
interval = "10s"
timeout = "10s"
}
}
task "syslogng" {
driver = "docker"
config {
image = "balbit/syslog-ng:latest"
image = "balabit/syslog-ng:3.37.1"
ports = ["main"]
args = ["--no-caps"]
@@ -165,7 +158,7 @@ source s_internal {
destination d_loki {
# Forward to Connect proxy to Promtail
syslog("${NOMAD_UPSTREAM_IP_syslogngpromtail}" transport("tcp") port(${NOMAD_UPSTREAM_PORT_syslogngpromtail}));
syslog("{{ env "NOMAD_UPSTREAM_IP_syslogng-promtail" }}" transport("tcp") port({{ env "NOMAD_UPSTREAM_PORT_syslogng-promtail" }}));
};
log { source(s_internal); destination(d_loki); };