---
- name: Build Consul cluster
  hosts: consul_instances
  any_errors_fatal: true

  roles:
    - role: ansible-consul
      vars:
        consul_version: "1.11.3"
        consul_install_remotely: true
        consul_install_upgrade: true
        consul_node_role: server
        consul_bootstrap_expect: true
        consul_user: consul
        consul_manage_user: true
        consul_group: bin
        consul_manage_group: true
        consul_architecture_map:
          x86_64: amd64
          armhfv6: arm
          armv7l: arm
        # consul_tls_enable: true
        consul_connect_enabled: true
        consul_ports_grpc: 8502
        consul_client_address: "0.0.0.0"
        # Enable metrics
        consul_config_custom:
          telemetry:
            prometheus_retention_time: "2h"
      become: true

  tasks:
    - name: Start Consul
      systemd:
        state: started
        name: consul
      become: true

    - name: Add values
      block:
        - name: Install python-consul
          pip:
            name: python-consul
            extra_args: --index-url https://pypi.org/simple

        - name: Add a value to Consul
          consul_kv:
            host: "{{ inventory_hostname }}"
            key: ansible_test
            value: Hello from Ansible!
      delegate_to: localhost
      run_once: true

- name: Setup Vault cluster
  hosts: vault_instances

  roles:
    - name: ansible-vault
      vars:
        # Doesn't support multi-arch installs
        vault_install_hashi_repo: true
        vault_bin_path: /usr/bin
        vault_harden_file_perms: true
        vault_address: 0.0.0.0
        vault_backend: consul
      become: true

  tasks:
    - name: Unseal vault
      command:
        argv:
          - "vault"
          - "operator"
          - "unseal"
          - "-address=http://127.0.0.1:8200/"
          - "{{ item }}"
      loop: "{{ vault_keys }}"
      no_log: true
      when: vault_keys is defined

# Not on Ubuntu 20.04
# - name: Install Podman
#   hosts: nomad_instances
#   become: true
#
#   tasks:
#     - name: Install Podman
#       package:
#         name: podman
#         state: present

- name: Create NFS mounts
  hosts: nomad_instances
  become: true

  tasks:
    - name: Install nfs
      package:
        name: nfs-common
        state: present

    - name: Create Motioneye NFS mount
      ansible.posix.mount:
        src: 192.168.2.10:/Recordings/Motioneye
        path: /srv/volumes/motioneye-recordings
        opts: proto=tcp,port=2049,rw
        state: mounted
        fstype: nfs4

    - name: Create Media Library RO NFS mount
      ansible.posix.mount:
        src: 192.168.2.10:/Multimedia
        path: /srv/volumes/media-read
        opts: proto=tcp,port=2049,ro
        state: mounted
        fstype: nfs4

- name: Build Nomad cluster
  hosts: nomad_instances
  any_errors_fatal: true
  become: true

  vars:
    shared_host_volumes:
      - name: motioneye-recordings
        path: /srv/volumes/motioneye-recordings
        owner: "root"
        group: "bin"
        mode: "0755"
        read_only: false
      - name: media-read
        path: /srv/volumes/media-read
        owner: "root"
        group: "root"
        mode: "0777"
        read_only: true

  roles:
    - name: ansible-nomad
      vars:
        nomad_version: "1.2.6"
        nomad_install_remotely: true
        nomad_install_upgrade: true
        nomad_allow_purge_config: true
        nomad_user: root
        nomad_manage_user: true
        nomad_group: bin
        nomad_manage_group: true

        # Properly map install arch
        nomad_architecture_map:
          x86_64: amd64
          armhfv6: arm
          armv7l: arm

        nomad_encrypt_enable: true
        # nomad_use_consul: true

        # Metrics
        nomad_telemetry: true
        nomad_telemetry_prometheus_metrics: true
        nomad_telemetry_publish_allocation_metrics: true
        nomad_telemetry_publish_node_metrics: true

        # Enable container plugins
        nomad_cni_enable: true
        nomad_cni_version: 1.0.1
        nomad_docker_enable: true
        nomad_docker_dmsetup: false
        # nomad_podman_enable: true

        nomad_host_volumes: "{{ shared_host_volumes + (nomad_unique_host_volumes | default([])) }}"

        # Customize docker plugin
        nomad_plugins:
          docker:
            config:
              volumes:
                enabled: true
                selinuxlabel: "z"
              extra_labels:
                - "job_name"
                - "job_id"
                - "task_group_name"
                - "task_name"
                - "namespace"
                - "node_name"
                - "node_id"

        # Bind nomad
        nomad_bind_address: 0.0.0.0

        # Default interface for binding tasks
        nomad_network_interface: lo

        # Create networks for binding task ports
        nomad_host_networks:
          # - name: public
          #   interface: eth0
          #   reserved_ports: "22"
          - name: nomad-bridge
            interface: nomad
            reserved_ports: "22"
          - name: loopback
            interface: lo
            reserved_ports: "22"

        # Enable ACLs
        nomad_acl_enabled: true

        # Enable vault integration
        nomad_vault_enabled: "{{ vault_token is defined }}"
        nomad_vault_token: "{{ vault_token | default('') }}"

        nomad_config_custom:
          ui:
            enabled: true
            consul:
              ui_url: "http://{{ ansible_hostname }}:8500/ui"
            vault:
              ui_url: "http://{{ ansible_hostname }}:8200/ui"
          consul:
            tags:
              - "traefik.enable=true"
              - "traefik.consulcatalog.connect=true"
              - "traefik.http.routers.nomadclient.entrypoints=websecure"

  tasks:
    - name: Start Nomad
      systemd:
        state: started
        name: nomad

- name: Bootstrap Nomad ACLs
  hosts: nomad_instances

  tasks:
    - name: Bootstrap ACLs
      command:
        argv:
          - "nomad"
          - "acl"
          - "bootstrap"
          - "-json"
      run_once: true
      ignore_errors: true
      register: bootstrap_result

    - name: Save bootstrap result
      copy:
        content: "{{ bootstrap_result.stdout }}"
        dest: "./nomad_bootstrap.json"
      when: bootstrap_result is succeeded
      delegate_to: localhost
      run_once: true

    - name: Look for policy
      command:
        argv:
          - nomad
          - acl
          - policy
          - list
      run_once: true
      register: policies

    - name: Read secret
      command:
        argv:
          - jq
          - -r
          - .SecretID
          - nomad_bootstrap.json
      delegate_to: localhost
      run_once: true
      register: read_secretid

    - name: Copy policy
      copy:
        src: ./acls/nomad-anon-bootstrap.hcl
        dest: /tmp/anonymous.policy.hcl
      delegate_to: "{{ play_hosts[0] }}"
      register: anon_policy
      run_once: true

    - name: Create anon-policy
      command:
        argv:
          - nomad
          - acl
          - policy
          - apply
          - -description="Anon RW"
          - anonymous
          - /tmp/anonymous.policy.hcl
      environment:
        NOMAD_TOKEN: "{{ read_secretid.stdout }}"
      when: policies.stdout == "No policies found" or anon_policy.changed
      delegate_to: "{{ play_hosts[0] }}"
      run_once: true