diff --git a/.github/workflows/pr-key-inject.yml b/.github/workflows/pr-key-inject.yml deleted file mode 100644 index 66c762f..0000000 --- a/.github/workflows/pr-key-inject.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: check PR (key_inject) - -on: - pull_request: - paths: - - roles/key_inject/** - - .github/** - -jobs: - run-molecule-tests: - strategy: - fail-fast: false - matrix: - molecule-driver: [docker] - uses: ./.github/workflows/reusable-molecule.yml - with: - role-name: key_inject - molecule-driver: ${{ matrix.molecule-driver }} \ No newline at end of file diff --git a/roles/key_inject/.ansible-lint b/roles/key_inject/.ansible-lint new file mode 100644 index 0000000..c6e764b --- /dev/null +++ b/roles/key_inject/.ansible-lint @@ -0,0 +1,7 @@ +--- +skip_list: + - name[casing] + - empty-string-compare # Don't compare to empty string + - experimental # all rules tagged as experimental + - "306" # Ignore not setting pipefail - required for sh shell + - name[template] # Style enforcement diff --git a/roles/key_inject/defaults/main.yml b/roles/key_inject/defaults/main.yml index c59f4ef..6a9a17b 100644 --- a/roles/key_inject/defaults/main.yml +++ b/roles/key_inject/defaults/main.yml @@ -1,8 +1,9 @@ -subkey_path: 'https://releases.parity.io/substrate/x86_64-debian%3Astretch/v3.0.0/subkey/subkey' +--- +subkey_path: https://releases.parity.io/substrate/x86_64-debian%3Astretch/v3.0.0/subkey/subkey # Parachain key injection variables key_inject_parachain_rpc_port: 9955 -key_inject_parachain_scheme: "sr25519" +key_inject_parachain_scheme: sr25519 # key_inject_parachain_aura_private_key= # Relay chain key injection variables diff --git a/roles/key_inject/tasks/check_session_key.yml b/roles/key_inject/tasks/check_session_key.yml index 91e93fa..bdeb263 100644 --- a/roles/key_inject/tasks/check_session_key.yml +++ b/roles/key_inject/tasks/check_session_key.yml @@ -1,33 +1,32 @@ +--- - name: Check session key | Generate session ansible.builtin.set_fact: - key_inject_session_key: 
"0x\ - {% for key in key_inject_relay_chain_key_list %}\ - {{ (key.priv_key | parity.chain.subkey_inspect(scheme=(key.scheme | default('sr25519')) )).publicKey.replace('0x','') }}\ - {% endfor %}" + key_inject_session_key: "0x{% for key in key_inject_relay_chain_key_list %}{{ (key.priv_key | parity.chain.subkey_inspect(scheme=(key.scheme | default('sr25519')))).publicKey.replace('0x', + '') }}{% endfor %}" - name: Check session key | Run rpc ansible.builtin.uri: - url: "http://127.0.0.1:{{ key_inject_relay_chain_rpc_port }}" + url: http://127.0.0.1:{{ key_inject_relay_chain_rpc_port }} method: POST body: jsonrpc: "2.0" - method: "author_hasSessionKeys" + method: author_hasSessionKeys params: ["{{ key_inject_session_key }}"] id: 1 body_format: json headers: - Content-Type: 'application/json' + Content-Type: application/json use_proxy: false changed_when: false check_mode: false register: key_inject_has_session_keys - name: Check session key | Debug - debug: + ansible.builtin.debug: msg: "RPC call failed: {{ key_inject_has_session_keys.json }}" when: key_inject_has_session_keys.json.result is not defined - name: Check session key | Check ansible.builtin.debug: - msg: "Session Key {{ key_inject_session_key }} is {{ 'NOT ' if not key_inject_has_session_keys.json.result else '' }}present in keystore" - changed_when: not key_inject_has_session_keys.json.result \ No newline at end of file + msg: Session Key {{ key_inject_session_key }} is {{ 'NOT ' if not key_inject_has_session_keys.json.result else '' }}present in keystore + changed_when: not key_inject_has_session_keys.json.result diff --git a/roles/key_inject/tasks/inject.yml b/roles/key_inject/tasks/inject.yml index 35ed021..a500e71 100644 --- a/roles/key_inject/tasks/inject.yml +++ b/roles/key_inject/tasks/inject.yml @@ -1,22 +1,23 @@ +--- - name: Inject keys + tags: [key-inject, key_inject] block: - name: Inject | Setting {{ item.type }} pub keys ansible.builtin.set_fact: - key_inject_pub_key: "{{ (item.priv_key | - 
parity.chain.subkey_inspect(scheme=(item.scheme | default('sr25519')) )).publicKey }}" + key_inject_pub_key: "{{ (item.priv_key | parity.chain.subkey_inspect(scheme=(item.scheme | default('sr25519')))).publicKey }}" - name: Inject | Check {{ item.type }} key ansible.builtin.uri: - url: "http://127.0.0.1:{{ item.rpc_port | default(key_inject_relay_chain_rpc_port) }}" + url: http://127.0.0.1:{{ item.rpc_port | default(key_inject_relay_chain_rpc_port) }} method: POST body: jsonrpc: "2.0" - method: "author_hasKey" + method: author_hasKey params: ["{{ key_inject_pub_key }}", "{{ item.type }}"] id: 1 body_format: json headers: - Content-Type: 'application/json' + Content-Type: application/json use_proxy: false changed_when: false check_mode: false @@ -28,21 +29,22 @@ - name: Inject | Check {{ item.type }} key results ansible.builtin.debug: - msg: "Key {{ key_inject_pub_key }} ({{ item.type }}, {{ item.scheme | default('sr25519') }}) is {{ 'NOT ' if not key_inject_uri.json.result else '' }}present in keystore" + msg: Key {{ key_inject_pub_key }} ({{ item.type }}, {{ item.scheme | default('sr25519') }}) is {{ 'NOT ' if not key_inject_uri.json.result else '' }}present + in keystore changed_when: not key_inject_uri.json.result - name: Inject | Inject {{ item.type }} keys ansible.builtin.uri: - url: "http://127.0.0.1:{{ item.rpc_port | default(key_inject_relay_chain_rpc_port) }}" + url: http://127.0.0.1:{{ item.rpc_port | default(key_inject_relay_chain_rpc_port) }} method: POST body: jsonrpc: "2.0" - method: "author_insertKey" + method: author_insertKey params: ["{{ item.type }}", "{{ item.priv_key }}", "{{ key_inject_pub_key }}"] id: 1 body_format: json headers: - Content-Type: 'application/json' + Content-Type: application/json use_proxy: false changed_when: true notify: Restart service @@ -53,5 +55,3 @@ ansible.builtin.debug: var: key_inject_uri when: not ansible_check_mode - - tags: ['key-inject', 'key_inject'] diff --git a/roles/key_inject/tasks/main.yml 
b/roles/key_inject/tasks/main.yml index 269c65f..724b5ab 100644 --- a/roles/key_inject/tasks/main.yml +++ b/roles/key_inject/tasks/main.yml @@ -1,28 +1,26 @@ -- block: +--- +- tags: [key-inject, key_inject] + block: + - name: Parachain keys + ansible.builtin.include_tasks: inject.yml + loop: + - rpc_port: "{{ key_inject_parachain_rpc_port }}" + scheme: "{{ key_inject_parachain_scheme }}" + type: aura + priv_key: "{{ key_inject_parachain_aura_private_key }}" + loop_control: + label: Parachain {{ item.type }} key + when: key_inject_parachain_aura_private_key is defined + - name: Relaychain keys + ansible.builtin.include_tasks: inject.yml + loop: "{{ key_inject_relay_chain_key_list }}" + loop_control: + label: Relaychain {{ item.type }} key + when: key_inject_relay_chain_key_list is defined - - name: Parachain keys - ansible.builtin.include_tasks: inject.yml - loop: - - rpc_port: "{{ key_inject_parachain_rpc_port }}" - scheme: "{{ key_inject_parachain_scheme }}" - type: "aura" - priv_key: "{{ key_inject_parachain_aura_private_key }}" - loop_control: - label: "Parachain {{ item.type }} key" - when: key_inject_parachain_aura_private_key is defined - - - name: Relaychain keys - ansible.builtin.include_tasks: inject.yml - loop: "{{ key_inject_relay_chain_key_list }}" - loop_control: - label: "Relaychain {{ item.type }} key" - when: key_inject_relay_chain_key_list is defined - - - name: Check session key is present - ansible.builtin.include_tasks: check_seesion_key.yml - when: - - key_inject_relay_chain_key_list is defined - - key_inject_check_session_key - - tags: ['key-inject', 'key_inject'] + - name: Check session key is present + ansible.builtin.include_tasks: check_session_key.yml + when: + - key_inject_relay_chain_key_list is defined + - key_inject_check_session_key diff --git a/roles/nginx/.ansible-lint b/roles/nginx/.ansible-lint new file mode 100644 index 0000000..c6e764b --- /dev/null +++ b/roles/nginx/.ansible-lint @@ -0,0 +1,7 @@ +--- +skip_list: + - 
name[casing] + - empty-string-compare # Don't compare to empty string + - experimental # all rules tagged as experimental + - "306" # Ignore not setting pipefail - required for sh shell + - name[template] # Style enforcement diff --git a/roles/nginx/defaults/main.yml b/roles/nginx/defaults/main.yml index 0e1572f..b5fafe0 100644 --- a/roles/nginx/defaults/main.yml +++ b/roles/nginx/defaults/main.yml @@ -1,4 +1,5 @@ -nginx_letsencrypt_email: "devops-team@parity.io" +--- +nginx_letsencrypt_email: devops-team@parity.io nginx_letsencrypt_mock: false nginx_dhparam_size: 4096 nginx_worker_rlimit_nofile: 30000 @@ -6,7 +7,6 @@ nginx_worker_rlimit_nofile: 30000 nginx_max_request_rate: 2 nginx_burst_request_rate: 5 - # print extended data about clients nginx_log_extended_enable: false diff --git a/roles/nginx/handlers/main.yml b/roles/nginx/handlers/main.yml index e4dacf6..bfa1286 100644 --- a/roles/nginx/handlers/main.yml +++ b/roles/nginx/handlers/main.yml @@ -1,8 +1,7 @@ --- - - name: reload nginx config ansible.builtin.systemd: - name: "nginx" + name: nginx state: reloaded - enabled: yes - daemon_reload: yes + enabled: true + daemon_reload: true diff --git a/roles/nginx/molecule/default/converge.yml b/roles/nginx/molecule/default/converge.yml index be4cb18..2ddbe35 100644 --- a/roles/nginx/molecule/default/converge.yml +++ b/roles/nginx/molecule/default/converge.yml @@ -4,6 +4,6 @@ tasks: - name: converge | deploy nginx without wipe ansible.builtin.include_role: - name: "nginx" + name: nginx vars: nginx_remove_enable: false diff --git a/roles/nginx/molecule/default/group_vars/all.yml b/roles/nginx/molecule/default/group_vars/all.yml index 7235298..497f440 100644 --- a/roles/nginx/molecule/default/group_vars/all.yml +++ b/roles/nginx/molecule/default/group_vars/all.yml @@ -1,3 +1,4 @@ +--- ## Molecule ansible_user: root @@ -5,26 +6,26 @@ nginx_letsencrypt_mock: true nginx_dhparam_size: 1024 nginx_sites: - template: site-rpc.j2 - domain: "a.rpc.lan" + domain: a.rpc.lan 
ssl_issuer: letsencrypt params: rpc_port: 9933 rpc_ws_port: 9944 - template: site-rpc.j2 - domain: "b.rpc.lan" + domain: b.rpc.lan ssl_issuer: manual - ssl_manual_cert_file: "test1.pem" + ssl_manual_cert_file: test1.pem params: rpc_port: 9933 rpc_ws_port: 9944 - template: site-connect.j2 - domain: "c.rpc.lan" + domain: c.rpc.lan ssl_issuer: letsencrypt params: connect_port: 9944 - template: site-connect.j2 - domain: "d.rpc.lan" + domain: d.rpc.lan ssl_issuer: manual - ssl_manual_cert_file: "test2.pem" + ssl_manual_cert_file: test2.pem params: connect_port: 9944 diff --git a/roles/nginx/molecule/default/molecule.yml b/roles/nginx/molecule/default/molecule.yml index 85b0b38..0fa28b6 100644 --- a/roles/nginx/molecule/default/molecule.yml +++ b/roles/nginx/molecule/default/molecule.yml @@ -9,26 +9,26 @@ platforms: source: alias: debian/bullseye/amd64 # DOCKER - image: "paritytech/debian11:latest" + image: paritytech/debian11:latest command: ${MOLECULE_DOCKER_COMMAND:-""} # need this for systemctl to work in Docker privileged: true # to pull image from docker hub uncomment this pre_build_image: true etc_hosts: - 'a.rpc.lan': '127.0.0.1' - 'b.rpc.lan': '127.0.0.1' - 'c.rpc.lan': '127.0.0.1' - 'd.rpc.lan': '127.0.0.1' + a.rpc.lan: 127.0.0.1 + b.rpc.lan: 127.0.0.1 + c.rpc.lan: 127.0.0.1 + d.rpc.lan: 127.0.0.1 provisioner: name: ansible options: - D: True + D: true config_options: defaults: callbacks_enabled: timer verifier: name: ansible options: - D: True + D: true diff --git a/roles/nginx/molecule/default/prepare.yml b/roles/nginx/molecule/default/prepare.yml index 87db92e..77eec80 100644 --- a/roles/nginx/molecule/default/prepare.yml +++ b/roles/nginx/molecule/default/prepare.yml @@ -1,3 +1,4 @@ +--- - name: prepare hosts: all gather_facts: false @@ -6,17 +7,17 @@ ansible.builtin.raw: apt -y update && apt install -y python3 changed_when: false vars: - websocat_dist_binary: "https://github.com/vi/websocat/releases/download/v1.9.0/websocat_linux64" - websocat_binary: 
"/usr/local/bin/websocat" - pebble_dist_binary: "https://github.com/letsencrypt/pebble/releases/download/v2.3.1/pebble_linux-amd64" - pebble_binary: "/usr/local/bin/pebble" - pebble_conf_dir: "/usr/local/etc/pebble/" + websocat_dist_binary: https://github.com/vi/websocat/releases/download/v1.9.0/websocat_linux64 + websocat_binary: /usr/local/bin/websocat + pebble_dist_binary: https://github.com/letsencrypt/pebble/releases/download/v2.3.1/pebble_linux-amd64 + pebble_binary: /usr/local/bin/pebble + pebble_conf_dir: /usr/local/etc/pebble/ tasks: - name: prepare | install packeges ansible.builtin.apt: name: "{{ packeges }}" state: present - update_cache: no + update_cache: false vars: packeges: - ca-certificates @@ -24,8 +25,8 @@ - netcat-openbsd - name: prepare | build hosts file ansible.builtin.lineinfile: - dest: "/etc/hosts" - line: "127.0.0.1 {{ item.domain }}" + dest: /etc/hosts + line: 127.0.0.1 {{ item.domain }} state: present loop: "{{ nginx_sites }}" when: molecule_yml.driver.name == 'lxd' @@ -34,9 +35,9 @@ ansible.builtin.get_url: url: "{{ websocat_dist_binary }}" dest: "{{ websocat_binary }}" - mode: 0755 - owner: "root" - group: "root" + mode: "0755" + owner: root + group: root # pebble provides mock for ACME (letsencrypt) - name: prepare | create pebble config directory ansible.builtin.file: @@ -49,9 +50,9 @@ ansible.builtin.get_url: url: "{{ pebble_dist_binary }}" dest: "{{ pebble_binary }}" - mode: 0755 - owner: "root" - group: "root" + mode: "0755" + owner: root + group: root - name: prepare | copy pebble config files ansible.builtin.copy: src: "{{ item.src }}" @@ -60,8 +61,8 @@ group: root mode: "0644" loop: - - {src: "pebble/cert.pem", dst: "{{ pebble_conf_dir }}"} # fake 127.0.0.1 certificate - - {src: "pebble/key.pem", dst: "{{ pebble_conf_dir }}"} # fake 127.0.0.1 certificate + - { src: pebble/cert.pem, dst: "{{ pebble_conf_dir }}" } # fake 127.0.0.1 certificate + - { src: pebble/key.pem, dst: "{{ pebble_conf_dir }}" } # fake 127.0.0.1 certificate 
- name: prepare | copy config templates ansible.builtin.template: src: "{{ item.src }}" @@ -70,10 +71,10 @@ group: root mode: "0644" loop: - - {src: "pebble.service.j2", dst: "/etc/systemd/system/pebble.service"} - - {src: "pebble-config.json.j2", dst: "{{ pebble_conf_dir }}pebble-config.json"} - - {src: "websocat.service.j2", dst: "/etc/systemd/system/websocat.service"} - - {src: "http-stub.service.j2", dst: "/etc/systemd/system/http-stub.service"} + - { src: pebble.service.j2, dst: /etc/systemd/system/pebble.service } + - { src: pebble-config.json.j2, dst: "{{ pebble_conf_dir }}pebble-config.json" } + - { src: websocat.service.j2, dst: /etc/systemd/system/websocat.service } + - { src: http-stub.service.j2, dst: /etc/systemd/system/http-stub.service } - name: prepare | run services ansible.builtin.systemd: name: "{{ item }}" @@ -81,15 +82,15 @@ enabled: true daemon_reload: true loop: - - "pebble.service" - - "http-stub.service" - - "websocat.service" + - pebble.service + - http-stub.service + - websocat.service - name: prepare | collect service facts ansible.builtin.service_facts: - name: prepare | check services ansible.builtin.assert: that: ansible_facts.services[item].state == 'running' loop: - - "pebble.service" - - "http-stub.service" - - "websocat.service" + - pebble.service + - http-stub.service + - websocat.service diff --git a/roles/nginx/molecule/default/verify.yml b/roles/nginx/molecule/default/verify.yml index f7ab861..f0c6443 100644 --- a/roles/nginx/molecule/default/verify.yml +++ b/roles/nginx/molecule/default/verify.yml @@ -5,16 +5,16 @@ tasks: - name: verify | deploy nginx with wipe ansible.builtin.include_role: - name: "nginx" + name: nginx vars: nginx_remove_enable: true - name: verify | check https RPC endpoints ansible.builtin.uri: - url: "https://{{ item.domain }}" - validate_certs: no + url: https://{{ item.domain }} + validate_certs: false loop: "{{ nginx_sites }}" when: item.template == 'site-rpc.j2' - name: verify | check wss RPC 
endpoints - ansible.builtin.command: "websocat --insecure -E wss:///{{ item.domain }}" - changed_when: False + ansible.builtin.command: websocat --insecure -E wss:///{{ item.domain }} + changed_when: false loop: "{{ nginx_sites }}" diff --git a/roles/nginx/tasks/certs-loop.yml b/roles/nginx/tasks/certs-loop.yml index 49abf83..574b456 100644 --- a/roles/nginx/tasks/certs-loop.yml +++ b/roles/nginx/tasks/certs-loop.yml @@ -1,12 +1,9 @@ --- - -- block: - - - name: nginx | certs | copy {{ item }} - ansible.builtin.copy: - src: "{{ item }}" - dest: "{{ _nginx_custom_certs_base_path }}{{ item }}" - owner: root - group: root - mode: "0600" - notify: reload nginx config +- name: nginx | certs | copy {{ item }} + ansible.builtin.copy: + src: "{{ item }}" + dest: "{{ _nginx_custom_certs_base_path }}{{ item }}" + owner: root + group: root + mode: "0600" + notify: reload nginx config diff --git a/roles/nginx/tasks/certs.yml b/roles/nginx/tasks/certs.yml index ea8b9eb..fbec401 100644 --- a/roles/nginx/tasks/certs.yml +++ b/roles/nginx/tasks/certs.yml @@ -1,5 +1,4 @@ --- - - name: nginx | certs | calculate list of custom certs ansible.builtin.set_fact: _nginx_custom_certs: "{{ nginx_sites | json_query(pattern) | map(attribute='ssl_manual_cert_file') | unique }}" @@ -18,30 +17,29 @@ group: root mode: "0755" -- block: - - - name: nginx | certs | find unmanaged custom certs files +- name: nginx | certs | custom certs + when: _nginx_custom_certs | length > 0 + block: + - name: nginx | certs | custom certs | find unmanaged custom certs files ansible.builtin.find: paths: "{{ _nginx_custom_certs_base_path }}" - patterns: "^((?!{{ _nginx_custom_certs | join('|') }}).)*$" + patterns: ^((?!{{ _nginx_custom_certs | join('|') }}).)*$ use_regex: true register: _nginx_unmanaged_custom_certs_files - - name: nginx | certs | print list of unmanaged custom certs files + - name: nginx | certs | custom certs | print list of unmanaged custom certs files ansible.builtin.debug: msg: "{{ 
_nginx_unmanaged_custom_certs_files.files | map(attribute='path') }}" - - name: nginx | certs | remove unmanaged custom certs files + - name: nginx | certs | custom certs | remove unmanaged custom certs files ansible.builtin.file: path: "{{ item.path }}" state: absent loop: "{{ _nginx_unmanaged_custom_certs_files.files }}" - when: _nginx_custom_certs | length > 0 - - name: nginx | certs | include tasks of sorts copying ansible.builtin.include_tasks: file: certs-loop.yml apply: - tags: ['nginx', 'nginx-custom-certs'] + tags: [nginx, nginx-custom-certs] loop: "{{ _nginx_custom_certs }}" diff --git a/roles/nginx/tasks/letsencrypt-loop.yml b/roles/nginx/tasks/letsencrypt-loop.yml index 6657dcc..1c51064 100644 --- a/roles/nginx/tasks/letsencrypt-loop.yml +++ b/roles/nginx/tasks/letsencrypt-loop.yml @@ -1,11 +1,8 @@ --- - - name: nginx | letsencrypt | domain - {{ item }} | setup letsencrypt cmd ansible.builtin.set_fact: - _nginx_letsencrypt_cmd: "certbot certonly --webroot -w /var/www/letsencrypt -d {{ item }} -n \ - -m {{ nginx_letsencrypt_email }} --agree-tos \ - {%- if ansible_check_mode %} --dry-run{% endif %} \ - {%- if nginx_letsencrypt_mock %} --server https://127.0.0.1:14000/dir --no-verify-ssl{%- endif %}" + _nginx_letsencrypt_cmd: certbot certonly --webroot -w /var/www/letsencrypt -d {{ item }} -n -m {{ nginx_letsencrypt_email }} --agree-tos{%- if ansible_check_mode + %} --dry-run{% endif %}{%- if nginx_letsencrypt_mock %} --server https://127.0.0.1:14000/dir --no-verify-ssl{%- endif %} - name: nginx | letsencrypt | domain - {{ item }} | print letsencrypt cmd ansible.builtin.debug: diff --git a/roles/nginx/tasks/letsencrypt.yml b/roles/nginx/tasks/letsencrypt.yml index e1d2029..fa23aa4 100644 --- a/roles/nginx/tasks/letsencrypt.yml +++ b/roles/nginx/tasks/letsencrypt.yml @@ -1,3 +1,4 @@ +--- - name: nginx | letsencrypt | calculate list of letsencrypt domains ansible.builtin.set_fact: _nginx_letsencrypt_domains: "{{ nginx_sites | json_query(pattern) | 
map(attribute='domain') | unique }}" @@ -12,5 +13,5 @@ ansible.builtin.include_tasks: file: letsencrypt-loop.yml apply: - tags: ['nginx', 'nginx-letsencrypt'] + tags: [nginx, nginx-letsencrypt] loop: "{{ _nginx_letsencrypt_domains }}" diff --git a/roles/nginx/tasks/main.yml b/roles/nginx/tasks/main.yml index 28ce7fa..beef6a5 100644 --- a/roles/nginx/tasks/main.yml +++ b/roles/nginx/tasks/main.yml @@ -1,123 +1,124 @@ --- - -- block: - - - name: nginx | include test tasks - ansible.builtin.include_tasks: - file: tests.yml - apply: - tags: ['nginx', 'nginx-tests'] - tags: ['nginx-tests'] - - - name: nginx | include remove tasks - ansible.builtin.include_tasks: - file: remove.yml - apply: - tags: ['nginx', 'nginx-remove'] - when: nginx_remove_enable | bool - tags: ['nginx-remove'] - - - name: nginx | install packeges - ansible.builtin.apt: - name: "{{ packeges }}" - state: present - update_cache: yes - vars: - packeges: - - nginx - - certbot - - - name: nginx | create directories - ansible.builtin.file: - name: "{{ item }}" - state: directory - owner: root - group: root - mode: "0755" - loop: - - "/var/www/letsencrypt" - - "/etc/letsencrypt/renewal-hooks/deploy" - - "/etc/systemd/system/nginx.service.d" - - - name: nginx | copy letsencrypt renewal-hook reload script - ansible.builtin.copy: - src: reload-nginx-config - dest: "/etc/letsencrypt/renewal-hooks/deploy" - owner: root - group: root - mode: "0744" - - - name: nginx | stat dhparams - ansible.builtin.stat: - path: /etc/nginx/dhparams.pem - register: stat_dhparams - - # The file is generated locally because it takes a LONG time to generate on VMs - - name: nginx | generate dhparams locally - become: no - community.crypto.openssl_dhparam: - path: /tmp/dhparams_{{ inventory_hostname }}.pem - size: "{{ nginx_dhparam_size }}" - delegate_to: localhost - when: not stat_dhparams.stat.exists - # molecule skip test - tags: molecule-notest - - - name: nginx | copy dhparams to node - ansible.builtin.copy: - src: 
/tmp/dhparams_{{ inventory_hostname }}.pem - dest: /etc/nginx/dhparams.pem - notify: reload nginx config - when: not stat_dhparams.stat.exists - # molecule skip test - tags: molecule-notest - - - name: nginx | copy config templates - ansible.builtin.template: - src: "{{ item.src }}" - dest: "{{ item.dst }}" - owner: "root" - group: "root" - mode: "0644" - notify: reload nginx config - loop: - - { src: "nginx.conf.j2", dst: "/etc/nginx/nginx.conf" } - - { src: "site-default.j2", dst: "/etc/nginx/sites-enabled/default" } - - { src: "override.conf.j2", dst: "/etc/systemd/system/nginx.service.d/override.conf" } - - - name: nginx | flush handlers - ansible.builtin.meta: flush_handlers - - - name: nginx | include custom certs tasks - ansible.builtin.include_tasks: - file: certs.yml - apply: - tags: ['nginx', 'nginx-custom-certs'] - tags: ['nginx-custom-certs'] - - - name: nginx | include letsencrypt tasks - ansible.builtin.include_tasks: - file: letsencrypt.yml - apply: - tags: ['nginx', 'nginx-letsencrypt'] - tags: ['nginx-letsencrypt'] - - - name: nginx | include site tasks - ansible.builtin.include_tasks: - file: sites.yml - apply: - tags: ['nginx', 'nginx-sites'] - tags: ['nginx-sites'] +- name: nginx + tags: [nginx] + block: + - name: nginx | include test tasks + ansible.builtin.include_tasks: + file: tests.yml + apply: + tags: [nginx, nginx-tests] + tags: [nginx-tests] + + - name: nginx | include remove tasks + ansible.builtin.include_tasks: + file: remove.yml + apply: + tags: [nginx, nginx-remove] + when: nginx_remove_enable | bool + tags: [nginx-remove] + + - name: nginx | install packeges + ansible.builtin.apt: + name: "{{ packeges }}" + state: present + update_cache: true + vars: + packeges: + - nginx + - certbot + + - name: nginx | create directories + ansible.builtin.file: + name: "{{ item }}" + state: directory + owner: root + group: root + mode: "0755" + loop: + - /var/www/letsencrypt + - /etc/letsencrypt/renewal-hooks/deploy + - 
/etc/systemd/system/nginx.service.d + + - name: nginx | copy letsencrypt renewal-hook reload script + ansible.builtin.copy: + src: reload-nginx-config + dest: /etc/letsencrypt/renewal-hooks/deploy + owner: root + group: root + mode: "0744" + + - name: nginx | stat dhparams + ansible.builtin.stat: + path: /etc/nginx/dhparams.pem + register: stat_dhparams + + # The file is generated locally because it takes a LONG time to generate on VMs + - name: nginx | generate dhparams locally + become: false + community.crypto.openssl_dhparam: + path: /tmp/dhparams_{{ inventory_hostname }}.pem + size: "{{ nginx_dhparam_size }}" + delegate_to: localhost + when: not stat_dhparams.stat.exists + # molecule skip test + tags: molecule-notest + + - name: nginx | copy dhparams to node + ansible.builtin.copy: + src: /tmp/dhparams_{{ inventory_hostname }}.pem + dest: /etc/nginx/dhparams.pem + owner: root + group: root + mode: "0600" + notify: reload nginx config + when: not stat_dhparams.stat.exists + # molecule skip test + tags: molecule-notest + + - name: nginx | copy config templates + ansible.builtin.template: + src: "{{ item.src }}" + dest: "{{ item.dst }}" + owner: root + group: root + mode: "0644" + notify: reload nginx config + loop: + - { src: nginx.conf.j2, dst: /etc/nginx/nginx.conf } + - { src: site-default.j2, dst: /etc/nginx/sites-enabled/default } + - { src: override.conf.j2, dst: /etc/systemd/system/nginx.service.d/override.conf } + + - name: nginx | flush handlers + ansible.builtin.meta: flush_handlers + + - name: nginx | include custom certs tasks + ansible.builtin.include_tasks: + file: certs.yml + apply: + tags: [nginx, nginx-custom-certs] + tags: [nginx-custom-certs] + + - name: nginx | include letsencrypt tasks + ansible.builtin.include_tasks: + file: letsencrypt.yml + apply: + tags: [nginx, nginx-letsencrypt] + tags: [nginx-letsencrypt] + + - name: nginx | include site tasks + ansible.builtin.include_tasks: + file: sites.yml + apply: + tags: [nginx, nginx-sites] + 
tags: [nginx-sites] # to avoid 2 restarts during the first deploy - - name: nginx | flush handlers - ansible.builtin.meta: flush_handlers - - - name: nginx | start nginx - ansible.builtin.systemd: - name: "nginx" - state: started - enabled: yes - daemon_reload: yes - - tags: ['nginx'] + - name: nginx | flush handlers + ansible.builtin.meta: flush_handlers + + - name: nginx | start nginx + ansible.builtin.systemd: + name: nginx + state: started + enabled: true + daemon_reload: true diff --git a/roles/nginx/tasks/remove.yml b/roles/nginx/tasks/remove.yml index 55b8073..b623a60 100644 --- a/roles/nginx/tasks/remove.yml +++ b/roles/nginx/tasks/remove.yml @@ -1,15 +1,14 @@ --- - - name: nginx | remove | stop nginx ansible.builtin.systemd: - name: "nginx" + name: nginx state: stopped - name: nginx | remove | remove packeges ansible.builtin.apt: name: "{{ packeges }}" state: absent - purge: yes + purge: true vars: packeges: - nginx @@ -22,7 +21,7 @@ name: "{{ item }}" state: absent loop: - - "/var/www/letsencrypt" - - "/etc/letsencrypt" - - "/etc/nginx" - - "/etc/systemd/system/nginx.service.d" + - /var/www/letsencrypt + - /etc/letsencrypt + - /etc/nginx + - /etc/systemd/system/nginx.service.d diff --git a/roles/nginx/tasks/sites.yml b/roles/nginx/tasks/sites.yml index 7c1b323..dac64c2 100644 --- a/roles/nginx/tasks/sites.yml +++ b/roles/nginx/tasks/sites.yml @@ -1,15 +1,11 @@ --- - - name: nginx | sites | build the list of site configs 1 ansible.builtin.set_fact: _nginx_sites: [] - name: nginx | sites | build the list of site configs 2 ansible.builtin.set_fact: - _nginx_sites: "{{ _nginx_sites + [ item | combine({'site_name': _nginx_site_name, - 'site_id': _nginx_site_id, - 'params': _nginx_site_params}, - recursive=True) ] }}" + _nginx_sites: "{{ _nginx_sites + [item | combine({'site_name': _nginx_site_name, 'site_id': _nginx_site_id, 'params': _nginx_site_params}, recursive=True)] }}" vars: _nginx_site_name: "{{ (item.template.split('.')[0] + '_' + item.domain) | 
regex_replace('[^0-9a-zA-Z]+', '_') }}" _nginx_site_id: "{{ (_nginx_site_name | hash('sha1'))[:6] }}" @@ -22,8 +18,8 @@ - name: nginx | sites | find unmanaged site config files ansible.builtin.find: - paths: "/etc/nginx/sites-enabled" - patterns: "^((?!{{ _nginx_sites | map(attribute='site_name') | join('|') }}|default).)*$" + paths: /etc/nginx/sites-enabled + patterns: ^((?!{{ _nginx_sites | map(attribute='site_name') | join('|') }}|default).)*$ use_regex: true register: _nginx_unmanaged_site_config_files @@ -41,9 +37,9 @@ - name: nginx | sites | copy site configs ansible.builtin.template: src: "{{ item.template }}" - dest: "/etc/nginx/sites-enabled/{{ item.site_name }}" - owner: "root" - group: "root" + dest: /etc/nginx/sites-enabled/{{ item.site_name }} + owner: root + group: root mode: "0644" notify: reload nginx config loop: "{{ _nginx_sites }}" diff --git a/roles/nginx/tasks/tests.yml b/roles/nginx/tasks/tests.yml index 9b905a6..3cd5c39 100644 --- a/roles/nginx/tasks/tests.yml +++ b/roles/nginx/tasks/tests.yml @@ -1,13 +1,11 @@ --- - - name: nginx | tests | fail if the site isn't unique 1 ansible.builtin.set_fact: _nginx_revised_sites: [] - name: nginx | tests | fail if the site isn't unique 2 ansible.builtin.set_fact: - _nginx_revised_sites: "{{ _nginx_revised_sites + [ 'template: ' + item.template + - ' domain: ' + item.domain ] }}" + _nginx_revised_sites: "{{ _nginx_revised_sites + ['template: ' + item.template + ' domain: ' + item.domain] }}" loop: "{{ nginx_sites }}" - name: nginx | tests | fail if the site isn't unique 3 @@ -15,17 +13,17 @@ msg: "{{ item }}. 
A pair of 'template' and 'domain' variables must be unique for each item of the 'nginx_sites' variable " loop: "{{ _nginx_revised_sites | sort }}" loop_control: - extended: yes + extended: true when: not ansible_loop.last and (item == ansible_loop.nextitem) - name: nginx | tests | check the ssl_issuer variable ansible.builtin.fail: - msg: "The 'ssl_issuer' variable must be defined, it can contain only 'manual', 'letsencrypt' values!" + msg: The 'ssl_issuer' variable must be defined, it can contain only 'manual', 'letsencrypt' values! loop: "{{ nginx_sites }}" when: item.ssl_issuer is not defined or item.ssl_issuer not in ['manual', 'letsencrypt'] - name: nginx | tests | check the ssl_manual_cert_file variable ansible.builtin.fail: - msg: "The 'ssl_manual_cert_file' variable must be defined, if 'ssl_issuer' == 'manual'" + msg: The 'ssl_manual_cert_file' variable must be defined, if 'ssl_issuer' == 'manual' loop: "{{ nginx_sites }}" when: item.ssl_issuer == 'manual' and item.ssl_manual_cert_file is not defined diff --git a/roles/nginx/vars/main.yml b/roles/nginx/vars/main.yml index a134bd2..a8fc4b3 100644 --- a/roles/nginx/vars/main.yml +++ b/roles/nginx/vars/main.yml @@ -1,3 +1,3 @@ --- -_nginx_custom_certs_base_path: "/etc/nginx/tls-certs/" +_nginx_custom_certs_base_path: /etc/nginx/tls-certs/ diff --git a/roles/nginx_exporter/.ansible-lint b/roles/nginx_exporter/.ansible-lint new file mode 100644 index 0000000..c6e764b --- /dev/null +++ b/roles/nginx_exporter/.ansible-lint @@ -0,0 +1,7 @@ +--- +skip_list: + - name[casing] + - empty-string-compare # Don't compare to empty string + - experimental # all rules tagged as experimental + - "306" # Ignore not setting pipefail - required for sh shell + - name[template] # Style enforcement diff --git a/roles/nginx_exporter/defaults/main.yml b/roles/nginx_exporter/defaults/main.yml index a694ec0..34beb6e 100644 --- a/roles/nginx_exporter/defaults/main.yml +++ b/roles/nginx_exporter/defaults/main.yml @@ -1,6 +1,6 @@ --- 
-nginx_exporter_name: "nginx-exporter" -nginx_exporter_user: "www-data" -nginx_exporter_binary: "https://github.com/nginxinc/nginx-prometheus-exporter/releases/download/v0.10.0/nginx-prometheus-exporter_0.10.0_linux_amd64.tar.gz" +nginx_exporter_name: nginx-exporter +nginx_exporter_user: www-data +nginx_exporter_binary: https://github.com/nginxinc/nginx-prometheus-exporter/releases/download/v0.10.0/nginx-prometheus-exporter_0.10.0_linux_amd64.tar.gz nginx_metric_port: 8080 diff --git a/roles/nginx_exporter/handlers/main.yml b/roles/nginx_exporter/handlers/main.yml index 71caeca..25f04e0 100644 --- a/roles/nginx_exporter/handlers/main.yml +++ b/roles/nginx_exporter/handlers/main.yml @@ -1,5 +1,4 @@ --- - - name: Restart nginx-exporter ansible.builtin.systemd: name: "{{ nginx_exporter_name }}" diff --git a/roles/nginx_exporter/tasks/main.yml b/roles/nginx_exporter/tasks/main.yml index 840d6a2..00b69cf 100644 --- a/roles/nginx_exporter/tasks/main.yml +++ b/roles/nginx_exporter/tasks/main.yml @@ -1,38 +1,36 @@ --- - - name: Nginx exporter - tags: ["nginx-exporter"] + tags: [nginx-exporter] block: - - name: Nginx exporter | download exporter ansible.builtin.unarchive: src: "{{ nginx_exporter_binary }}" dest: "{{ _nginx_exporter_file | dirname }}" - remote_src: yes - owner: "root" - group: "root" - mode: 0644 + remote_src: true + owner: root + group: root + mode: "0644" notify: Restart nginx-exporter - name: Nginx exporter | change permissions of binary ansible.builtin.file: path: "{{ _nginx_exporter_file }}" - owner: "root" - group: "root" - mode: 0755 + owner: root + group: root + mode: "0755" state: file notify: Restart nginx-exporter - name: Nginx exporter | copy exporter systemd unit file ansible.builtin.template: - src: ".service.j2" - dest: "/etc/systemd/system/{{ nginx_exporter_name }}.service" - owner: "root" - group: "root" + src: .service.j2 + dest: /etc/systemd/system/{{ nginx_exporter_name }}.service + owner: root + group: root mode: "0600" notify: Restart 
nginx-exporter - # to avoid 2 restarts during the first deploy + # to avoid 2 restarts during the first deploy - name: Nginx exporter | flush handlers ansible.builtin.meta: flush_handlers diff --git a/roles/nginx_exporter/vars/main.yml b/roles/nginx_exporter/vars/main.yml index 7ea7679..69449b2 100644 --- a/roles/nginx_exporter/vars/main.yml +++ b/roles/nginx_exporter/vars/main.yml @@ -1 +1,2 @@ -_nginx_exporter_file: "/usr/local/bin/nginx-prometheus-exporter" \ No newline at end of file +--- +_nginx_exporter_file: /usr/local/bin/nginx-prometheus-exporter diff --git a/roles/node/.ansible-lint b/roles/node/.ansible-lint index 7552279..a8b43d5 100644 --- a/roles/node/.ansible-lint +++ b/roles/node/.ansible-lint @@ -1,5 +1,8 @@ +--- skip_list: - - empty-string-compare # Don't compare to empty string - - experimental # all rules tagged as experimental - - '306' # Ignore not setting pipefail - required for sh shell + - name[casing] + - empty-string-compare # Don't compare to empty string + - experimental # all rules tagged as experimental + - "306" # Ignore not setting pipefail - required for sh shell - name[template] # Style enforcement + - ignore-errors diff --git a/roles/node/defaults/main.yml b/roles/node/defaults/main.yml index 3db6a39..8f89d3a 100644 --- a/roles/node/defaults/main.yml +++ b/roles/node/defaults/main.yml @@ -7,7 +7,7 @@ # It's used for naming of systemd unit files, binary files, key files etc. 
node_app_name: "{{ node_chain }}" # A node will be ran with permissions of this user -node_user: "polkadot" +node_user: polkadot # if it's empty, the '(home directory of node_user)/.local/share/polkadot' directory will be used node_data_root_path: "" @@ -52,7 +52,7 @@ node_start_service: true # File name for prometheus node-exporter textfile collector # Example: /var/lib/prometheus/node-exporter/substrate.prom node_prometheus_file_exporter_path: "" -node_prometheus_file_exporter_event: "node_role" +node_prometheus_file_exporter_event: node_role ##################################################################################### # Relaychain ##################################################################################### @@ -61,7 +61,7 @@ node_prometheus_file_exporter_event: "node_role" # It's used for telemetry node_public_name: "{{ inventory_hostname }}" # It's role of node. It can be "validator", "boot", "full", "rpc". -node_role: "full" +node_role: full # You have to specify it in your playbook or inventory! node_chain: "" # Set up name of a chainspec template file from "templates" folder or https url to file. 
@@ -124,7 +124,7 @@ node_chain_backup_tmp_restore_path: "" ## HTTP backups # link to HTTP backups -node_chain_backup_http_base_url: "https://snapshots.polkadot.io" +node_chain_backup_http_base_url: https://snapshots.polkadot.io # full link to a HTTP backup node_chain_backup_http_url: "" # If you don't have pre-installed rclone the role can install it @@ -138,9 +138,10 @@ node_chain_backup_url: "" ### Loging and telemetry node_telemetry_enable: true # If you set an empty value, it will use the default telemetry server -node_telemetry_url: "wss:/telemetry.polkadot.io/submit/ 1" +node_telemetry_url: wss://telemetry.polkadot.io/submit/ 1 node_log_trace_enable: false -node_log_trace_config: "babe=trace,imonline=trace,slots=trace,sync=trace,consensus=trace,client=trace,forks=trace,txpool=debug,afg=trace,sub-authority-discovery=debug,sc_offchain=trace,runtime=trace,staking=trace,runtime::election-provider=trace" +node_log_trace_config: + babe=trace,imonline=trace,slots=trace,sync=trace,consensus=trace,client=trace,forks=trace,txpool=debug,afg=trace,sub-authority-discovery=debug,sc_offchain=trace,runtime=trace,staking=trace,runtime::election-provider=trace # custom labels to be added to journald logs. e.g. 
"chain=kusama team=kusama-statemint" node_syslog_labels: "" @@ -172,7 +173,6 @@ node_parachain_wasm_runtime: "" ### Experimental Feature # Remote Relay Chain via RPC node_parachain_relay_chain_rpc_urls: [] - ### Keys ## p2p key # If it's empty, the node will generate default key file @@ -233,9 +233,10 @@ node_parachain_chain_backup_url: "" ### Loging and telemetry node_parachain_telemetry_enable: true # If you set an empty value, it will use the default telemetry server -node_parachain_telemetry_url: "wss://telemetry.polkadot.io/submit/ 1" +node_parachain_telemetry_url: wss://telemetry.polkadot.io/submit/ 1 node_parachain_log_trace_enable: false -node_parachain_log_trace_config: "babe=trace,imonline=trace,slots=trace,sync=trace,consensus=trace,client=trace,forks=trace,txpool=debug,afg=trace,sub-authority-discovery=debug,sc_offchain=trace,runtime=trace,staking=trace,runtime::election-provider=trace" +node_parachain_log_trace_config: + babe=trace,imonline=trace,slots=trace,sync=trace,consensus=trace,client=trace,forks=trace,txpool=debug,afg=trace,sub-authority-discovery=debug,sc_offchain=trace,runtime=trace,staking=trace,runtime::election-provider=trace ### Role flow # Group of variables to manage the flow of the role @@ -246,10 +247,10 @@ node_parachain_database_wipe: false ##################################################################################### node_memory_profiler_enable: false -node_memory_profiler_binary: "https://github.com/koute/bytehound/releases/download/0.11.0/bytehound-x86_64-unknown-linux-gnu.tgz" +node_memory_profiler_binary: https://github.com/koute/bytehound/releases/download/0.11.0/bytehound-x86_64-unknown-linux-gnu.tgz # if it's empty, the '(home directory of node_user)/logs' directory will be used node_memory_profiler_log_path: "" -node_memory_profiler_log_level: "info" +node_memory_profiler_log_level: info # This value (in milliseconds) decides which allocations are considered temporary. 
# diff --git a/roles/node/molecule/default/converge.yml b/roles/node/molecule/default/converge.yml index 9671f96..6f8d892 100644 --- a/roles/node/molecule/default/converge.yml +++ b/roles/node/molecule/default/converge.yml @@ -3,6 +3,6 @@ hosts: all gather_facts: true tasks: - - name: "Include node" + - name: Include node ansible.builtin.include_role: - name: "node" + name: node diff --git a/roles/node/molecule/default/group_vars/all.yml b/roles/node/molecule/default/group_vars/all.yml index a71d409..3b012bf 100644 --- a/roles/node/molecule/default/group_vars/all.yml +++ b/roles/node/molecule/default/group_vars/all.yml @@ -1,3 +1,4 @@ +--- ## Molecule ansible_user: root @@ -11,13 +12,13 @@ node_binary: https://github.com/paritytech/polkadot-sdk/releases/download/polkad node_binary_signature: https://github.com/paritytech/polkadot-sdk/releases/download/polkadot-{{ node_binary_version }}/polkadot.asc node_pruning: 256 ode_paritydb_enable: true -node_chain_backup_restoring_type: "none" -node_parachain_chain_backup_restoring_type: "none" +node_chain_backup_restoring_type: none +node_parachain_chain_backup_restoring_type: none # This private key is only for modulecule tests # Note: don't modify this key either, because the last character (which is invisible here) is special # and without it, subkey won't be able to work with it node_p2p_private_key: a4964e8e979c29fcdd79403db8c374cae91857e69a13162f7664a6529bd66093 -node_prometheus_file_exporter_path: "/tmp/substrate.prom" -node_data_root_path: "/opt/polkadot-root" -node_memory_profiler_log_path: "/opt/polkadot-root-logs" -node_enable_public_ip_detection: false \ No newline at end of file +node_prometheus_file_exporter_path: /tmp/substrate.prom +node_data_root_path: /opt/polkadot-root +node_memory_profiler_log_path: /opt/polkadot-root-logs +node_enable_public_ip_detection: false diff --git a/roles/node/molecule/default/molecule.yml b/roles/node/molecule/default/molecule.yml index 579f2f0..305a0ca 100644 --- 
a/roles/node/molecule/default/molecule.yml +++ b/roles/node/molecule/default/molecule.yml @@ -9,7 +9,7 @@ platforms: source: alias: debian/bullseye/amd64 # DOCKER - image: "paritytech/debian11:latest" + image: paritytech/debian11:latest command: ${MOLECULE_DOCKER_COMMAND:-""} # need this for systemctl to work in Docker privileged: true @@ -19,11 +19,11 @@ platforms: provisioner: name: ansible options: - D: True + D: true config_options: defaults: callbacks_enabled: timer verifier: name: ansible options: - D: True + D: true diff --git a/roles/node/molecule/default/prepare.yml b/roles/node/molecule/default/prepare.yml index 0ded797..51d4632 100644 --- a/roles/node/molecule/default/prepare.yml +++ b/roles/node/molecule/default/prepare.yml @@ -1,3 +1,4 @@ +--- - name: Prepare hosts: all gather_facts: false @@ -9,5 +10,5 @@ ansible.builtin.apt: name: - gpg - update_cache: no + update_cache: false changed_when: false diff --git a/roles/node/molecule/default/verify.yml b/roles/node/molecule/default/verify.yml index a6a670c..9c8ff57 100644 --- a/roles/node/molecule/default/verify.yml +++ b/roles/node/molecule/default/verify.yml @@ -3,102 +3,99 @@ hosts: all gather_facts: false tasks: - - name: Collect service facts - ansible.builtin.service_facts: - - - name: Print service facts - ansible.builtin.debug: - var: ansible_facts.services['polkadot.service'] - - - name: check service - ansible.builtin.assert: - that: ansible_facts.services['polkadot.service'].state == 'running' - - - name: Get system_health - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_health", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _system_health_result.status is defined and _system_health_result.status == 200 - retries: 3 - delay: 10 - register: _system_health_result - - - name: Print system_health - ansible.builtin.debug: - msg: "{{ 
_system_health_result.json }}" - - - name: Re-deploy node with additional parameters - ansible.builtin.include_role: - name: "node" - vars: - node_database_wipe: true - node_parachain_database_wipe: true - node_start_service: false - - - name: Collect service facts 1 - ansible.builtin.service_facts: - - - name: Print service facts 1 - ansible.builtin.debug: - var: ansible_facts.services['polkadot.service'] - - - name: Check service 1 - ansible.builtin.assert: - that: ansible_facts.services['polkadot.service'].state == 'stopped' - - - name: Start {{ node_app_name }} service - ansible.builtin.systemd: - name: "{{ node_app_name }}" - state: "started" - - - name: Collect service facts 2 - ansible.builtin.service_facts: - - - name: Print service facts 2 - ansible.builtin.debug: - var: ansible_facts.services['polkadot.service'] - - - name: Check service 2 - ansible.builtin.assert: - that: ansible_facts.services['polkadot.service'].state == 'running' - - - name: Get system_health - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_health", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _system_health_result.status is defined and _system_health_result.status == 200 - retries: 3 - delay: 10 - register: _system_health_result - - - name: Print system_health - ansible.builtin.debug: - msg: "{{ _system_health_result.json }}" - - - name: Get system_syncState - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_syncState", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - register: _system_syncstate_result - - - name: Print system_syncState - ansible.builtin.debug: - msg: "{{ _system_syncstate_result.json }}" + - name: Collect service facts + ansible.builtin.service_facts: + + - name: Print service facts + 
ansible.builtin.debug: + var: ansible_facts.services['polkadot.service'] + + - name: check service + ansible.builtin.assert: + that: ansible_facts.services['polkadot.service'].state == 'running' + + - name: Get system_health + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_health, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _system_health_result.status is defined and _system_health_result.status == 200 + retries: 3 + delay: 10 + register: _system_health_result + + - name: Print system_health + ansible.builtin.debug: + msg: "{{ _system_health_result.json }}" + + - name: Re-deploy node with additional parameters + ansible.builtin.include_role: + name: node + vars: + node_database_wipe: true + node_parachain_database_wipe: true + node_start_service: false + + - name: Collect service facts 1 + ansible.builtin.service_facts: + + - name: Print service facts 1 + ansible.builtin.debug: + var: ansible_facts.services['polkadot.service'] + + - name: Check service 1 + ansible.builtin.assert: + that: ansible_facts.services['polkadot.service'].state == 'stopped' + + - name: Start {{ node_app_name }} service + ansible.builtin.systemd: + name: "{{ node_app_name }}" + state: started + + - name: Collect service facts 2 + ansible.builtin.service_facts: + + - name: Print service facts 2 + ansible.builtin.debug: + var: ansible_facts.services['polkadot.service'] + + - name: Check service 2 + ansible.builtin.assert: + that: ansible_facts.services['polkadot.service'].state == 'running' + + - name: Get system_health + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_health, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _system_health_result.status is defined and _system_health_result.status == 200 + retries: 3 + 
delay: 10 + register: _system_health_result + + - name: Print system_health + ansible.builtin.debug: + msg: "{{ _system_health_result.json }}" + + - name: Get system_syncState + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_syncState, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + register: _system_syncstate_result + + - name: Print system_syncState + ansible.builtin.debug: + msg: "{{ _system_syncstate_result.json }}" diff --git a/roles/node/molecule/parachain/converge.yml b/roles/node/molecule/parachain/converge.yml index 9671f96..6f8d892 100644 --- a/roles/node/molecule/parachain/converge.yml +++ b/roles/node/molecule/parachain/converge.yml @@ -3,6 +3,6 @@ hosts: all gather_facts: true tasks: - - name: "Include node" + - name: Include node ansible.builtin.include_role: - name: "node" + name: node diff --git a/roles/node/molecule/parachain/group_vars/all.yml b/roles/node/molecule/parachain/group_vars/all.yml index 4139fb5..10262e5 100644 --- a/roles/node/molecule/parachain/group_vars/all.yml +++ b/roles/node/molecule/parachain/group_vars/all.yml @@ -1,3 +1,4 @@ +--- ## Molecule ansible_user: root @@ -8,16 +9,16 @@ node_rpc_port: 9944 node_parachain_rpc_port: 9954 node_binary: https://github.com/paritytech/cumulus/releases/download/{{ node_binary_version }}/polkadot-parachain node_binary_signature: https://github.com/paritytech/cumulus/releases/download/{{ node_binary_version }}/polkadot-parachain.asc -node_app_name: "shell" -node_prometheus_file_exporter_path: "/tmp/substrate.prom" +node_app_name: shell +node_prometheus_file_exporter_path: /tmp/substrate.prom node_enable_public_ip_detection: false # Relaychain -node_chain: "rococo" -node_chainspec: "https://paritytech.github.io/chainspecs/rococo/relaychain/chainspec.json" -node_chain_backup_restoring_type: "none" +node_chain: rococo +node_chainspec: 
https://paritytech.github.io/chainspecs/rococo/relaychain/chainspec.json +node_chain_backup_restoring_type: none # Parachain -node_parachain_chain: "shell" -node_parachain_chain_backup_restoring_type: "none" -node_parachain_role: "collator" +node_parachain_chain: shell +node_parachain_chain_backup_restoring_type: none +node_parachain_role: collator diff --git a/roles/node/molecule/parachain/molecule.yml b/roles/node/molecule/parachain/molecule.yml index 9264c15..b63f72a 100644 --- a/roles/node/molecule/parachain/molecule.yml +++ b/roles/node/molecule/parachain/molecule.yml @@ -9,7 +9,7 @@ platforms: source: alias: debian/bullseye/amd64 # DOCKER - image: "paritytech/debian11:latest" + image: paritytech/debian11:latest command: ${MOLECULE_DOCKER_COMMAND:-""} # need this for systemctl to work in Docker privileged: true @@ -19,11 +19,11 @@ platforms: provisioner: name: ansible options: - D: True + D: true config_options: defaults: callbacks_enabled: timer verifier: name: ansible options: - D: True + D: true diff --git a/roles/node/molecule/parachain/prepare.yml b/roles/node/molecule/parachain/prepare.yml index 02d0dc1..3448375 100644 --- a/roles/node/molecule/parachain/prepare.yml +++ b/roles/node/molecule/parachain/prepare.yml @@ -1,3 +1,4 @@ +--- - name: Prepare hosts: all gather_facts: false @@ -9,5 +10,5 @@ ansible.builtin.apt: name: - gpg - update_cache: no + update_cache: false changed_when: false diff --git a/roles/node/molecule/parachain/verify.yml b/roles/node/molecule/parachain/verify.yml index 2a360f4..b772630 100644 --- a/roles/node/molecule/parachain/verify.yml +++ b/roles/node/molecule/parachain/verify.yml @@ -3,100 +3,96 @@ hosts: all gather_facts: false tasks: - - name: re-deploy node with wipe - ansible.builtin.include_role: - name: "node" - vars: - node_database_wipe: true - node_parachain_database_wipe: true + - name: re-deploy node with wipe + ansible.builtin.include_role: + name: node + vars: + node_database_wipe: true + 
node_parachain_database_wipe: true - - name: Collect service facts - ansible.builtin.service_facts: + - name: Collect service facts + ansible.builtin.service_facts: - - name: Print service facts - ansible.builtin.debug: - var: ansible_facts.services['shell.service'] + - name: Print service facts + ansible.builtin.debug: + var: ansible_facts.services['shell.service'] - - name: check service - ansible.builtin.assert: - that: ansible_facts.services['shell.service'].state == 'running' + - name: check service + ansible.builtin.assert: + that: ansible_facts.services['shell.service'].state == 'running' - - name: Get relaychain system_health - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_health", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _relaychain_system_health_result.status is defined and _relaychain_system_health_result.status == 200 - retries: 3 - delay: 10 - register: _relaychain_system_health_result + - name: Get relaychain system_health + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_health, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _relaychain_system_health_result.status is defined and _relaychain_system_health_result.status == 200 + retries: 3 + delay: 10 + register: _relaychain_system_health_result - - name: Get parachain system_health - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_parachain_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_health", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _parachain_system_health_result.status is defined and _parachain_system_health_result.status == 200 - retries: 3 - delay: 10 - register: _parachain_system_health_result + - 
name: Get parachain system_health + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_parachain_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_health, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _parachain_system_health_result.status is defined and _parachain_system_health_result.status == 200 + retries: 3 + delay: 10 + register: _parachain_system_health_result - - name: Print system_health - ansible.builtin.debug: - msg: - - "Relaychain: {{ _relaychain_system_health_result.json }}" - - "Parachain: {{ _parachain_system_health_result.json }}" + - name: Print system_health + ansible.builtin.debug: + msg: + - "Relaychain: {{ _relaychain_system_health_result.json }}" + - "Parachain: {{ _parachain_system_health_result.json }}" - - name: relay chain syncing - ansible.builtin.assert: - that: _relaychain_system_health_result.json['result']['isSyncing'] + - name: relay chain syncing + ansible.builtin.assert: + that: _relaychain_system_health_result.json['result']['isSyncing'] - - name: parachain is not syncing (it is not onboarded) - ansible.builtin.assert: - that: not _parachain_system_health_result.json['result']['isSyncing'] + - name: parachain is not syncing (it is not onboarded) + ansible.builtin.assert: + that: not _parachain_system_health_result.json['result']['isSyncing'] - - name: Get relaychain system_syncState - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_syncState", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _relaychain_system_syncstate_result.status is defined and _relaychain_system_syncstate_result.status == 200 - retries: 3 - delay: 10 - register: _relaychain_system_syncstate_result + - name: Get relaychain system_syncState + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_rpc_port }} + method: POST + body: { 
id: 1, jsonrpc: "2.0", method: system_syncState, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _relaychain_system_syncstate_result.status is defined and _relaychain_system_syncstate_result.status == 200 + retries: 3 + delay: 10 + register: _relaychain_system_syncstate_result - - name: Get parachain system_syncState - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_parachain_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_syncState", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _parachain_system_syncstate_result.status is defined and _parachain_system_syncstate_result.status == 200 - retries: 3 - delay: 10 - register: _parachain_system_syncstate_result + - name: Get parachain system_syncState + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_parachain_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_syncState, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _parachain_system_syncstate_result.status is defined and _parachain_system_syncstate_result.status == 200 + retries: 3 + delay: 10 + register: _parachain_system_syncstate_result - - name: Print system_syncState - ansible.builtin.debug: - msg: - - "Relaychain: {{ _relaychain_system_syncstate_result.json }}" - - "Parachain: {{ _parachain_system_syncstate_result.json }}" \ No newline at end of file + - name: Print system_syncState + ansible.builtin.debug: + msg: + - "Relaychain: {{ _relaychain_system_syncstate_result.json }}" + - "Parachain: {{ _parachain_system_syncstate_result.json }}" diff --git a/roles/node/molecule/parachain_remote_rc/converge.yml b/roles/node/molecule/parachain_remote_rc/converge.yml index adf7050..cf64bb8 100644 --- a/roles/node/molecule/parachain_remote_rc/converge.yml +++ b/roles/node/molecule/parachain_remote_rc/converge.yml @@ -3,13 +3,13 @@ 
hosts: all gather_facts: true tasks: - - name: "parachain" + - name: parachain ansible.builtin.include_role: - name: "node" + name: node vars: - node_app_name: "parachain-shell" - node_parachain_chain: "shell" - node_parachain_role: "collator" - node_parachain_chain_backup_restoring_type: "none" - node_parachain_relay_chain_rpc_urls: ["ws://127.0.0.1:9944"] - node_prometheus_file_exporter_path: "/tmp/substrate-pc.prom" + node_app_name: parachain-shell + node_parachain_chain: shell + node_parachain_role: collator + node_parachain_chain_backup_restoring_type: none + node_parachain_relay_chain_rpc_urls: [ws://127.0.0.1:9944] + node_prometheus_file_exporter_path: /tmp/substrate-pc.prom diff --git a/roles/node/molecule/parachain_remote_rc/group_vars/all.yml b/roles/node/molecule/parachain_remote_rc/group_vars/all.yml index 7d90ed3..e66d3f9 100644 --- a/roles/node/molecule/parachain_remote_rc/group_vars/all.yml +++ b/roles/node/molecule/parachain_remote_rc/group_vars/all.yml @@ -1,11 +1,12 @@ +--- ## Molecule ansible_user: root # Common -node_app_name: "dummy" +node_app_name: dummy node_binary_version: v0.9.430 node_legacy_rpc_flags: false node_parachain_rpc_port: 9954 node_binary: https://github.com/paritytech/cumulus/releases/download/{{ node_binary_version }}/polkadot-parachain node_binary_signature: https://github.com/paritytech/cumulus/releases/download/{{ node_binary_version }}/polkadot-parachain.asc -node_enable_public_ip_detection: false \ No newline at end of file +node_enable_public_ip_detection: false diff --git a/roles/node/molecule/parachain_remote_rc/molecule.yml b/roles/node/molecule/parachain_remote_rc/molecule.yml index 8847240..04eecfd 100644 --- a/roles/node/molecule/parachain_remote_rc/molecule.yml +++ b/roles/node/molecule/parachain_remote_rc/molecule.yml @@ -8,18 +8,18 @@ platforms: source: alias: debian/bullseye/amd64 # DOCKER - image: "paritytech/debian11:latest" + image: paritytech/debian11:latest command: ${MOLECULE_DOCKER_COMMAND:-""} 
privileged: true pre_build_image: true provisioner: name: ansible options: - D: True + D: true config_options: defaults: callbacks_enabled: timer verifier: name: ansible options: - D: True + D: true diff --git a/roles/node/molecule/parachain_remote_rc/prepare.yml b/roles/node/molecule/parachain_remote_rc/prepare.yml index ca8937d..5401f06 100644 --- a/roles/node/molecule/parachain_remote_rc/prepare.yml +++ b/roles/node/molecule/parachain_remote_rc/prepare.yml @@ -1,3 +1,4 @@ +--- - name: Prepare hosts: all gather_facts: false @@ -9,21 +10,21 @@ ansible.builtin.apt: name: - gpg - update_cache: no + update_cache: false changed_when: false tasks: - - name: "relaychain" + - name: relaychain ansible.builtin.include_role: - name: "node" + name: node vars: - node_app_name: "relaychain-shell" - node_data_root_path: "/opt/{{ node_app_name }}" - node_chain: "rococo-dev" - node_custom_options: [ '--alice' ] + node_app_name: relaychain-shell + node_data_root_path: /opt/{{ node_app_name }} + node_chain: rococo-dev + node_custom_options: [--alice] node_binary_version: v0.9.43 node_legacy_rpc_flags: false node_binary: https://github.com/paritytech/polkadot/releases/download/{{ node_binary_version }}/polkadot node_binary_signature: https://github.com/paritytech/polkadot/releases/download/{{ node_binary_version }}/polkadot.asc node_rpc_port: 9944 - node_chain_backup_restoring_type: "none" - node_prometheus_file_exporter_path: "/tmp/substrate-rc.prom" + node_chain_backup_restoring_type: none + node_prometheus_file_exporter_path: /tmp/substrate-rc.prom diff --git a/roles/node/molecule/parachain_remote_rc/verify.yml b/roles/node/molecule/parachain_remote_rc/verify.yml index a9cfb8d..2d83aa5 100644 --- a/roles/node/molecule/parachain_remote_rc/verify.yml +++ b/roles/node/molecule/parachain_remote_rc/verify.yml @@ -3,32 +3,31 @@ hosts: all gather_facts: false tasks: - - name: Collect service facts - ansible.builtin.service_facts: + - name: Collect service facts + 
ansible.builtin.service_facts: - - name: Print service facts - ansible.builtin.debug: - var: ansible_facts.services['parachain-shell.service'] + - name: Print service facts + ansible.builtin.debug: + var: ansible_facts.services['parachain-shell.service'] - - name: check service - ansible.builtin.assert: - that: ansible_facts.services['parachain-shell.service'].state == 'running' + - name: check service + ansible.builtin.assert: + that: ansible_facts.services['parachain-shell.service'].state == 'running' - - name: Get parachain system_health - ansible.builtin.uri: - url: "http://127.0.0.1:{{ node_parachain_rpc_port }}" - method: POST - body: - {"id":1, "jsonrpc":"2.0", "method":"system_health", "params":[]} - body_format: json - headers: - Content-Type: 'application/json' - use_proxy: false - until: _parachain_system_health_result.status is defined and _parachain_system_health_result.status == 200 - retries: 3 - delay: 10 - register: _parachain_system_health_result + - name: Get parachain system_health + ansible.builtin.uri: + url: http://127.0.0.1:{{ node_parachain_rpc_port }} + method: POST + body: { id: 1, jsonrpc: "2.0", method: system_health, params: [] } + body_format: json + headers: + Content-Type: application/json + use_proxy: false + until: _parachain_system_health_result.status is defined and _parachain_system_health_result.status == 200 + retries: 3 + delay: 10 + register: _parachain_system_health_result - - name: Print system_health - ansible.builtin.debug: - msg: "Parachain: {{ _parachain_system_health_result.json }}" + - name: Print system_health + ansible.builtin.debug: + msg: "Parachain: {{ _parachain_system_health_result.json }}" diff --git a/roles/node/tasks/001-health-check.yml b/roles/node/tasks/001-health-check.yml index 0f8a340..badc0ed 100644 --- a/roles/node/tasks/001-health-check.yml +++ b/roles/node/tasks/001-health-check.yml @@ -1,12 +1,11 @@ --- - - name: Health check | Collect service facts ansible.builtin.service_facts: - tags: 
['node-restore-chain'] + tags: [node-restore-chain] - name: Health check | Fail is service is not running ansible.builtin.fail: - msg: "Service {{ node_app_name }} is not running" + msg: Service {{ node_app_name }} is not running when: - ansible_facts.services[node_app_name+'.service'] is defined - ansible_facts.services[node_app_name+'.service'].state != 'running' @@ -17,55 +16,52 @@ var: ansible_facts.services[node_app_name+'.service'].state - name: Health Check | Block + when: ansible_facts.services[node_app_name+'.service'] is defined and ansible_facts.services[node_app_name+'.service'].state not in ['stopped', 'inactive'] block: + - name: Health check | Run health check + ansible.builtin.uri: + url: http://127.0.0.1:{{ (node_parachain_relay_chain_rpc_urls | length != 0) | ternary(node_parachain_rpc_port, node_rpc_port) }} + method: POST + body_format: json + body: + id: 1 + jsonrpc: "2.0" + method: system_health + params: [] + return_content: true + use_proxy: false + register: _node_health_check_register + until: _node_health_check_register.status is defined and _node_health_check_register.status == 200 + retries: 12 + delay: 10 + check_mode: false + changed_when: false - - name: Health check | Run health check - ansible.builtin.uri: - url: "http://127.0.0.1:{{ ( node_parachain_relay_chain_rpc_urls | length != 0 ) | ternary(node_parachain_rpc_port, node_rpc_port) }}" - method: "POST" - body_format: "json" - body: - id: 1 - jsonrpc: "2.0" - method: "system_health" - params: [] - return_content: yes - use_proxy: false - register: _node_health_check_register - until: _node_health_check_register.status is defined and _node_health_check_register.status == 200 - retries: 12 - delay: 10 - check_mode: false - changed_when: false - - - name: Health check | Print health check result - ansible.builtin.debug: - msg: | - {{ node_app_name }} is healthy - peers: {{ _node_health_check_register.json.result.peers }} - - - name: Health check | Check the current version using 
API - ansible.builtin.uri: - url: "http://127.0.0.1:{{ ( node_parachain_relay_chain_rpc_urls | length != 0 ) | ternary(node_parachain_rpc_port, node_rpc_port) }}" - method: "POST" - body_format: "json" - body: - id: 1 - jsonrpc: "2.0" - method: "system_version" - params: [ ] - return_content: yes - use_proxy: false - register: _node_version_check_register - until: _node_version_check_register.status is defined and _node_version_check_register.status == 200 - retries: 2 - delay: 10 - check_mode: false - changed_when: false + - name: Health check | Print health check result + ansible.builtin.debug: + msg: | + {{ node_app_name }} is healthy + peers: {{ _node_health_check_register.json.result.peers }} - - name: Health check | Print the current version according to API - ansible.builtin.debug: - msg: "The current version is {{ _node_version_check_register.json.result }}" + - name: Health check | Check the current version using API + ansible.builtin.uri: + url: http://127.0.0.1:{{ (node_parachain_relay_chain_rpc_urls | length != 0) | ternary(node_parachain_rpc_port, node_rpc_port) }} + method: POST + body_format: json + body: + id: 1 + jsonrpc: "2.0" + method: system_version + params: [] + return_content: true + use_proxy: false + register: _node_version_check_register + until: _node_version_check_register.status is defined and _node_version_check_register.status == 200 + retries: 2 + delay: 10 + check_mode: false + changed_when: false - when: ansible_facts.services[node_app_name+'.service'] is defined and - ansible_facts.services[node_app_name+'.service'].state not in ['stopped', 'inactive'] + - name: Health check | Print the current version according to API + ansible.builtin.debug: + msg: The current version is {{ _node_version_check_register.json.result }} diff --git a/roles/node/tasks/002-restart.yml b/roles/node/tasks/002-restart.yml index c3ac1e1..92347c7 100644 --- a/roles/node/tasks/002-restart.yml +++ b/roles/node/tasks/002-restart.yml @@ -1,10 +1,9 @@ --- - - 
name: Restart | Restart service ansible.builtin.systemd: name: "{{ node_app_name }}" state: restarted - enabled: yes - daemon_reload: yes + enabled: true + daemon_reload: true notify: health check {{ node_handler_id }} ignore_errors: "{{ not _node_systemd_unit_file_stat.stat.exists }}" diff --git a/roles/node/tasks/100-tests.yml b/roles/node/tasks/100-tests.yml index 7413f81..c640232 100644 --- a/roles/node/tasks/100-tests.yml +++ b/roles/node/tasks/100-tests.yml @@ -1,25 +1,24 @@ --- - - name: Test | Check node_binary ansible.builtin.fail: - msg: "The 'node_binary' variable can't be empty!" + msg: The 'node_binary' variable can't be empty! when: node_binary == '' - name: Test | Check if node_binary_signature is a URL ansible.builtin.fail: - msg: "The 'node_binary_signature' variable must be a URL!" + msg: The 'node_binary_signature' variable must be a URL! when: - node_binary_signature != '' - not node_binary_signature.startswith('http') - name: Test | Check node_app_name ansible.builtin.fail: - msg: "The 'node_app_name' variable can't be empty!" + msg: The 'node_app_name' variable can't be empty! when: node_app_name == '' - name: Test | Check node_chain ansible.builtin.fail: - msg: "The 'node_chain' variable can't be empty!" + msg: The 'node_chain' variable can't be empty! when: - node_chain == '' - node_chainspec == '' @@ -27,47 +26,47 @@ - name: Test | Check node_parachain_chain ansible.builtin.fail: - msg: "The 'node_parachain_chain' variable can't be empty!" + msg: The 'node_parachain_chain' variable can't be empty! when: node_parachain_role != '' and node_parachain_chain == '' and node_parachain_chainspec == '' - name: Test | Check node_role ansible.builtin.fail: - msg: "The 'node_role' variable can contain only 'validator', 'boot', 'full' or 'rpc' values!" + msg: The 'node_role' variable can contain only 'validator', 'boot', 'full' or 'rpc' values! 
when: node_role == '' or node_role not in ["validator", "boot", "rpc", "full"] - name: Test | Check node_parachain_role ansible.builtin.fail: - msg: "The 'node_parachain_role' variable can contain only 'collator', 'validator' 'rpc' or 'full' values!" + msg: The 'node_parachain_role' variable can contain only 'collator', 'validator' 'rpc' or 'full' values! when: node_parachain_role != '' and node_parachain_role not in ["collator", "rpc", "full", "validator"] - name: Test | Check correctness of role variables ansible.builtin.fail: - msg: "You use the wrong combination of 'node_role' and 'node_parachain_role' variables!" + msg: You use the wrong combination of 'node_role' and 'node_parachain_role' variables! when: node_role == 'validator' and node_parachain_role == 'collator' - name: Test | Check node_app_name ansible.builtin.fail: - msg: "The 'node_app_name' variable can contain only '0-9a-zA-Z_-' symbols!" + msg: The 'node_app_name' variable can contain only '0-9a-zA-Z_-' symbols! when: node_app_name is regex('[^0-9a-zA-Z_-]+') - name: Test | Check node_chain_backup_restoring_type ansible.builtin.fail: - msg: "The 'node_chain_backup_restoring_type' variable can contain only 'http', 'tar' or 'none' values!" + msg: The 'node_chain_backup_restoring_type' variable can contain only 'http', 'tar' or 'none' values! when: node_chain_backup_restoring_type not in ["http", "tar", "none"] - name: Test | Check node_parachain_chain_backup_restoring_type ansible.builtin.fail: - msg: "The 'node_parachain_chain_backup_restoring_type' variable can contain only 'http', 'tar' or 'none' values!" + msg: The 'node_parachain_chain_backup_restoring_type' variable can contain only 'http', 'tar' or 'none' values! 
when: node_parachain_chain_backup_restoring_type not in ["http", "tar", "none"] - name: Test | Check node_chain_backup_url ansible.builtin.fail: - msg: "If you use tar backups, you have to set the node_chain_backup_url variable" + msg: If you use tar backups, you have to set the node_chain_backup_url variable when: node_chain_backup_restoring_type == 'tar' and node_chain_backup_url == '' - name: Test | Check node_parachain_chain_backup_url ansible.builtin.fail: - msg: "If you use tar backups, you have to set the node_parachain_chain_backup_url variable" + msg: If you use tar backups, you have to set the node_parachain_chain_backup_url variable when: node_parachain_chain_backup_restoring_type == 'tar' and node_parachain_chain_backup_url == '' - name: Test | Check number of executions in play diff --git a/roles/node/tasks/1000-post-tasks.yml b/roles/node/tasks/1000-post-tasks.yml index 52e0142..2f1aea6 100644 --- a/roles/node/tasks/1000-post-tasks.yml +++ b/roles/node/tasks/1000-post-tasks.yml @@ -1,5 +1,4 @@ --- - - name: Post tasks | Remove temporary directory ansible.builtin.file: path: "{{ _node_temp_dir.path }}" diff --git a/roles/node/tasks/200-prepare.yml b/roles/node/tasks/200-prepare.yml index dc65467..3f52386 100644 --- a/roles/node/tasks/200-prepare.yml +++ b/roles/node/tasks/200-prepare.yml @@ -1,5 +1,4 @@ --- - - name: Prepare | Create user ansible.builtin.user: name: "{{ node_user }}" @@ -17,7 +16,7 @@ - name: Prepare | Check '_node_user_home_path' variable 2 ansible.builtin.fail: - msg: "The user home dir {{ _node_user_home_path }} must be created!" + msg: The user home dir {{ _node_user_home_path }} must be created! 
when: _node_user_home_path_stat.stat.isdir is not defined or not _node_user_home_path_stat.stat.isdir ignore_errors: "{{ not _node_systemd_unit_file_stat.stat.exists }}" diff --git a/roles/node/tasks/300-wipe.yml b/roles/node/tasks/300-wipe.yml index 4978f58..441d8dd 100644 --- a/roles/node/tasks/300-wipe.yml +++ b/roles/node/tasks/300-wipe.yml @@ -1,5 +1,4 @@ --- - - name: Wipe | Stop service ansible.builtin.systemd: name: "{{ node_app_name }}" @@ -13,12 +12,12 @@ state: absent vars: # remove whole `chains` folder - _node_chain_id: '' + _node_chain_id: "" notify: restart service {{ node_handler_id }} retries: 5 register: _node_wipe_results until: not _node_wipe_results.failed - delay: "60" + delay: 60 when: node_database_wipe | bool - name: Wipe | Delete parachain DB @@ -26,14 +25,14 @@ path: "{{ _node_parachain_data_chain_path }}" state: absent vars: - _node_parachain_chain_id: '' + _node_parachain_chain_id: "" notify: restart service {{ node_handler_id }} retries: 5 register: _node_wipe_results until: not _node_wipe_results.failed - delay: "60" + delay: 60 when: node_parachain_role != '' and (node_parachain_database_wipe | bool) # we need it to update information about free space after the wipe - name: Wipe | Gather facts - ansible.builtin.gather_facts: \ No newline at end of file + ansible.builtin.gather_facts: diff --git a/roles/node/tasks/400-binary.yml b/roles/node/tasks/400-binary.yml index 9411c0b..098e5f7 100644 --- a/roles/node/tasks/400-binary.yml +++ b/roles/node/tasks/400-binary.yml @@ -1,5 +1,4 @@ --- - - name: Binary | Set variables ansible.builtin.set_fact: _node_binaries: "{{ [{'url': node_binary, 'signature_url': node_binary_signature, 'dst': _node_main_binary_file_name}] }}" @@ -22,25 +21,23 @@ - name: Binary | Add prepare_worker to the '_node_binaries' variable ansible.builtin.set_fact: - _node_binaries: "{{ _node_binaries + [{'url': node_prepare_worker_binary, - 'signature_url': node_prepare_worker_binary_signature, - 'dst': 
'polkadot-prepare-worker'}] }}" + _node_binaries: "{{ _node_binaries + [{'url': node_prepare_worker_binary, 'signature_url': node_prepare_worker_binary_signature, 'dst': 'polkadot-prepare-worker'}] + }}" when: node_prepare_worker_binary != '' - name: Binary | Add execute_worker to the '_node_binaries' variable ansible.builtin.set_fact: - _node_binaries: "{{ _node_binaries + [{'url': node_execute_worker_binary, - 'signature_url': node_execute_worker_binary_signature, - 'dst': 'polkadot-execute-worker'}] }}" + _node_binaries: "{{ _node_binaries + [{'url': node_execute_worker_binary, 'signature_url': node_execute_worker_binary_signature, 'dst': 'polkadot-execute-worker'}] + }}" when: node_execute_worker_binary != '' - name: Binary | Download binaries to temp bin directory ansible.builtin.get_url: url: "{{ item.url }}" dest: "{{ _node_temp_binary_path }}/{{ item.dst }}" - mode: 0755 - owner: "root" - group: "root" + mode: "0755" + owner: root + group: root timeout: 30 headers: PRIVATE-TOKEN: "{{ node_binary_download_private_token }}" @@ -49,12 +46,14 @@ changed_when: false - name: Binary | GPG signature verification - block: + when: node_binary_signature != '' or node_prepare_worker_binary_signature != '' or node_execute_worker_binary_signature != '' + block: - name: Binary | Download GPG signatures for binaries ansible.builtin.get_url: url: "{{ item.signature_url }}" dest: "{{ _node_temp_binary_path }}/{{ item.dst }}.asc" + mode: "0644" check_mode: false changed_when: false loop: "{{ _node_binaries }}" @@ -78,8 +77,6 @@ when: item.signature_url != '' failed_when: _node_verifyout.rc != 0 - when: node_binary_signature != '' or node_prepare_worker_binary_signature != '' or node_execute_worker_binary_signature != '' - - name: Binary | Check new version ansible.builtin.command: "{{ _node_temp_binary_path }}/{{ _node_main_binary_file_name }} --version" register: _node_new_version @@ -107,18 +104,15 @@ _node_commit_hash: "{{ new_version.split('-')[1] }}" _node_version_equal: 
"{{ current_version == new_version }}" vars: - current_version: "{% if _node_current_version.stdout is defined -%} - {{ _node_current_version.stdout.split(' ')[1] }}{%- else -%}absent{%- endif %}" + current_version: "{% if _node_current_version.stdout is defined -%} {{ _node_current_version.stdout.split(' ')[1] }}{%- else -%}absent{%- endif %}" new_version: "{{ _node_new_version.stdout.split(' ')[1] }}" - name: Binary | Print versions according to the '--version' flag ansible.builtin.debug: - msg: "{% if _node_version_equal -%}Versions are equal! - {%- else -%}{{ _node_version_msg }}{%- endif %}" + msg: "{% if _node_version_equal -%}Versions are equal!{%- else -%}{{ _node_version_msg }}{%- endif %}" - name: Binary | Migration between versions block: - - name: Binary | Check new version ansible.builtin.command: "{{ _node_temp_binary_path }}/{{ _node_main_binary_file_name }} --help" register: _node_new_help @@ -132,7 +126,8 @@ - name: Binary | Check new rpc flags ansible.builtin.fail: - msg: "ERROR: RPC flag --ws-port {{ 'IS' if _node_legacy_rpc_flags_supported else 'NOT' }} supported. 'node_legacy_rpc_flags' should be set to {{ _node_legacy_rpc_flags_supported }}" + msg: "ERROR: RPC flag --ws-port {{ 'IS' if _node_legacy_rpc_flags_supported else 'NOT' }} supported. 'node_legacy_rpc_flags' should be set to {{ _node_legacy_rpc_flags_supported + }}" when: # XOR (skip fail if both true or both false) - node_legacy_rpc_flags or _node_legacy_rpc_flags_supported @@ -141,8 +136,8 @@ - name: Binary | Check new worker flags ansible.builtin.fail: msg: > - ERROR: node flag --workers-path {{ 'IS' if _node_separate_binary_supported else 'NOT' }} supported. - 'node_prepare_worker_binary' and 'node_execute_worker_binary' variables should be set + ERROR: node flag --workers-path {{ 'IS' if _node_separate_binary_supported else 'NOT' }} supported. 
'node_prepare_worker_binary' and 'node_execute_worker_binary' + variables should be set when: - _node_separate_binary_supported - node_role == 'validator' @@ -169,17 +164,18 @@ ansible.builtin.copy: src: "{{ _node_temp_binary_path }}/{{ item.dst }}" dest: "{{ _node_binary_path }}/{{ item.dst }}" - remote_src: yes - mode: 0755 + remote_src: true + mode: "0755" owner: "{{ node_user }}" group: "{{ node_user }}" loop: "{{ _node_binaries }}" notify: restart service {{ node_handler_id }} - ignore_errors: "{{ ansible_check_mode and (not _node_binary_path_stat.stat.exists or (_node_binary_path_stat.stat.exists and not _node_binary_path_stat.stat.isdir)) }}" + ignore_errors: "{{ ansible_check_mode and (not _node_binary_path_stat.stat.exists or (_node_binary_path_stat.stat.exists and not _node_binary_path_stat.stat.isdir)) + }}" - name: Binary | Block + when: node_prometheus_file_exporter_path != '' block: - - name: Binary | Print _node_binary_version_from_url ansible.builtin.debug: var: _node_binary_version_from_url @@ -190,6 +186,4 @@ dest: "{{ node_prometheus_file_exporter_path }}" owner: root group: root - mode: '0644' - - when: node_prometheus_file_exporter_path != '' + mode: "0644" diff --git a/roles/node/tasks/500-memory-profiler.yml b/roles/node/tasks/500-memory-profiler.yml index c66e6cf..115d775 100644 --- a/roles/node/tasks/500-memory-profiler.yml +++ b/roles/node/tasks/500-memory-profiler.yml @@ -1,10 +1,9 @@ --- - - name: Memory profiler | Create directories ansible.builtin.file: path: "{{ item }}" state: directory - mode: '0755' + mode: "0755" owner: "{{ node_user }}" group: "{{ node_user }}" loop: @@ -14,7 +13,7 @@ ansible.builtin.unarchive: src: "{{ node_memory_profiler_binary }}" dest: "{{ _node_binary_path }}" - remote_src: yes + remote_src: true owner: "{{ node_user }}" group: "{{ node_user }}" - mode: 0644 + mode: "0644" diff --git a/roles/node/tasks/600-chain.yml b/roles/node/tasks/600-chain.yml index 3d8b426..fc09930 100644 --- 
a/roles/node/tasks/600-chain.yml +++ b/roles/node/tasks/600-chain.yml @@ -1,10 +1,9 @@ --- - - name: Systemd | Create directories ansible.builtin.file: path: "{{ item }}" state: directory - mode: '0755' + mode: "0755" owner: "{{ node_user }}" group: "{{ node_user }}" loop: @@ -19,7 +18,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_chainspec_file }}" - mode: 0644 + mode: "0644" notify: restart service {{ node_handler_id }} ignore_errors: "{{ _node_create_directories_register.changed }}" when: node_chainspec != '' and not node_chainspec.startswith('http') @@ -30,7 +29,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_chainspec_file }}" - mode: 0644 + mode: "0644" force: true notify: restart service {{ node_handler_id }} when: @@ -44,7 +43,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_parachain_chainspec_file }}" - mode: 0644 + mode: "0644" notify: restart service {{ node_handler_id }} ignore_errors: "{{ _node_create_directories_register.changed }}" when: node_parachain_role != '' and node_parachain_chainspec != '' and not node_parachain_chainspec.startswith('http') @@ -55,7 +54,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_parachain_chainspec_file }}" - mode: 0644 + mode: "0644" force: true notify: restart service {{ node_handler_id }} when: @@ -70,7 +69,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_wasm_runtime_base_path }}/relaychain.wasm" - mode: 0644 + mode: "0644" notify: restart service {{ node_handler_id }} ignore_errors: "{{ _node_create_directories_register.changed }}" when: node_wasm_runtime != '' @@ -81,7 +80,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_wasm_runtime_base_path }}/parachain.wasm" - mode: 0644 + mode: "0644" notify: restart service {{ node_handler_id }} ignore_errors: "{{ _node_create_directories_register.changed }}" when: node_parachain_role != '' and node_parachain_wasm_runtime != '' @@ -89,7 
+88,7 @@ - name: Chain | Find unmanaged {{ node_app_name }} wasm runtime file ansible.builtin.find: paths: "{{ _node_wasm_runtime_base_path }}" - patterns: "^((?!relaychain.wasm|parachain.wasm).)*$" + patterns: ^((?!relaychain.wasm|parachain.wasm).)*$ use_regex: true register: _node_unmanaged_wasm_runtime_files @@ -106,7 +105,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_p2p_key_file }}" - mode: 0600 + mode: "0600" notify: restart service {{ node_handler_id }} ignore_errors: "{{ _node_create_directories_register.changed }}" when: node_p2p_private_key != '' @@ -117,7 +116,7 @@ owner: "{{ node_user }}" group: "{{ node_user }}" dest: "{{ _node_parachain_p2p_key_file }}" - mode: 0600 + mode: "0600" notify: restart service {{ node_handler_id }} ignore_errors: "{{ _node_create_directories_register.changed }}" when: node_parachain_role != '' and node_parachain_p2p_private_key != '' diff --git a/roles/node/tasks/700-get-chainid.yml b/roles/node/tasks/700-get-chainid.yml index e3e37c4..9cafaa8 100644 --- a/roles/node/tasks/700-get-chainid.yml +++ b/roles/node/tasks/700-get-chainid.yml @@ -1,5 +1,4 @@ --- - # Relay chain - name: Get chain id | Slurp chainspec_file ansible.builtin.slurp: @@ -11,10 +10,7 @@ - name: Get chain id | Set chain id ansible.builtin.set_fact: - _node_chain_id: "{{ (_node_current_chainspec['content'] | b64decode | from_json).id - if node_chainspec != '' else - node_chain_backup_chain_path - }}" + _node_chain_id: "{{ (_node_current_chainspec['content'] | b64decode | from_json).id if node_chainspec != '' else node_chain_backup_chain_path }}" - name: Get chain id | print _node_chain_id ansible.builtin.debug: @@ -26,20 +22,16 @@ src: "{{ _node_parachain_chainspec_file }}" register: _node_current_parachain_chainspec when: - - node_parachain_role != '' - - node_parachain_chainspec != '' + - node_parachain_role != '' + - node_parachain_chainspec != '' check_mode: false changed_when: false - name: Get chain id | Set parachain chain id 
ansible.builtin.set_fact: - _node_parachain_chain_id: " - {%- if node_parachain_role != '' and node_parachain_chainspec != '' -%} - {{ (_node_current_parachain_chainspec['content'] | b64decode | from_json).id }} - {%- elif node_parachain_role != '' and node_parachain_chainspec == '' -%} - {{ node_parachain_chain_backup_chain_path }} - {%- else -%} - {%- endif %}" + _node_parachain_chain_id: "{%- if node_parachain_role != '' and node_parachain_chainspec != '' -%} {{ (_node_current_parachain_chainspec['content'] | b64decode + | from_json).id }}{%- elif node_parachain_role != '' and node_parachain_chainspec == '' -%} {{ node_parachain_chain_backup_chain_path }}{%- else -%} {%- endif + %}" - name: Get chain id | print _node_parachain_chain_id ansible.builtin.debug: diff --git a/roles/node/tasks/800-restore-chain.yml b/roles/node/tasks/800-restore-chain.yml index 4aad4d8..b592334 100644 --- a/roles/node/tasks/800-restore-chain.yml +++ b/roles/node/tasks/800-restore-chain.yml @@ -1,5 +1,4 @@ --- - - name: Restore {{ item.part }} | Check if chain folder already exists ansible.builtin.stat: path: "{{ item.chain_path }}" @@ -20,88 +19,80 @@ - name: Restore {{ item.part }} | Set custom facts 1 ansible.builtin.set_fact: - _node_run_restore: "{{ not _node_data_chain_path_db_stat.stat.exists or - _node_data_chain_path_db_size.stdout == '0' }}" + _node_run_restore: "{{ not _node_data_chain_path_db_stat.stat.exists or _node_data_chain_path_db_size.stdout == '0' }}" # we only use the tmp_restore_path directory if we really need it (when a service is run) - _node_use_tmp_restore_path: "{{ ansible_facts.services[node_app_name+'.service'] is defined and - ansible_facts.services[node_app_name+'.service'].state == 'running' and - node_chain_backup_tmp_restore_path != '' }}" + _node_use_tmp_restore_path: "{{ ansible_facts.services[node_app_name + '.service'] is defined and ansible_facts.services[node_app_name + '.service'].state == + 'running' and node_chain_backup_tmp_restore_path != 
'' }}" #https://docs.ansible.com/ansible/latest/user_guide/complex_data_manipulation.html#find-mount-point # List can be empty if a mounted device doesn't start with `/` (exotic FS like docker overlay, bug https://github.com/ansible/ansible/issues/24644). # Be careful, if you use these FSs, the free space will not be checked. _node_run_check_size_mounts: "{{ ansible_mounts | selectattr('mount', 'in', _node_data_root_path) | list | sort(attribute='mount') }}" - - name: Restore {{ item.part }} | Set custom facts 2 ansible.builtin.set_fact: # We don't need to calculate free space if we sync a backup to an existing DB # Because we can't know the required amount of free space before syncing - _node_run_check_size: "{{ not (_node_data_chain_path_db_stat.stat.exists and - _node_data_chain_path_db_size.stdout != '0' and - not _node_use_tmp_restore_path) and - _node_run_check_size_mounts | length > 0 }}" - _node_backup_dl_path: "{{ node_chain_backup_tmp_restore_path if _node_use_tmp_restore_path - else item.chain_path + '/' + item.db_folder }}" + _node_run_check_size: "{{ not (_node_data_chain_path_db_stat.stat.exists and _node_data_chain_path_db_size.stdout != '0' and not _node_use_tmp_restore_path) and + _node_run_check_size_mounts | length > 0 }}" + _node_backup_dl_path: "{{ node_chain_backup_tmp_restore_path if _node_use_tmp_restore_path else item.chain_path + '/' + item.db_folder }}" - name: Run {{ item.part }} restoring + when: _node_run_restore block: - # A previous run can be stopped unexpectedly. # We have to remove the temp directory to calculate the right amount of free space. - - name: Restore {{ item.part }} | Delete temporary folder - ansible.builtin.file: - path: "{{ node_chain_backup_tmp_restore_path }}" - state: absent - when: _node_use_tmp_restore_path - - # It doesn't really matter what directory it is, temporary or not. 
- # Anyway, we really need an existing directory to allow sync utilities to be run - - name: Restore {{ item.part }} | Make sure download path exists - ansible.builtin.file: - path: "{{ _node_backup_dl_path }}" - state: directory - owner: "{{ node_user }}" - group: "{{ node_user }}" - mode: '0755' - - - name: Restore {{ item.part }} | Check free space in '_node_data_root_path' - ansible.builtin.set_fact: - _node_restore_free_space: "{{ _node_run_check_size_mounts[-1]['size_available'] }}" - when: _node_run_check_size - - - name: Restore {{ item.part }} | Print free space in '_node_data_root_path' - ansible.builtin.debug: - msg: Free space at destination = {{ _node_restore_free_space | filesizeformat(true) }} - when: _node_run_check_size - - - name: Restore {{ item.part }} | Tar restoring - ansible.builtin.include_tasks: - file: 801-restore-chain-tar.yml - apply: - tags: [ 'node', 'node-restore-chain' ] - when: item.restoring_type == 'tar' - - - name: Restore {{ item.part }} | HTTP restoring - ansible.builtin.include_tasks: - file: 803-restore-chain-http.yml - apply: - tags: [ 'node', 'node-restore-chain' ] - when: item.restoring_type == 'http' - - - name: Restore {{ item.part }} | Check if data_root folder already exists - ansible.builtin.stat: - path: "{{ _node_data_root_path }}" - get_checksum: false - register: _node_data_root_path_stat - when: ansible_check_mode - - - name: Restore {{ item.part }} | Recursively change ownership of the '_node_data_root_path' directory - ansible.builtin.file: - path: "{{ _node_data_root_path }}" - state: directory - recurse: yes - owner: "{{ node_user }}" - group: "{{ node_user }}" - ignore_errors: "{{ ansible_check_mode and not _node_data_root_path_stat.stat.exists }}" - - when: _node_run_restore + - name: Restore {{ item.part }} | Delete temporary folder + ansible.builtin.file: + path: "{{ node_chain_backup_tmp_restore_path }}" + state: absent + when: _node_use_tmp_restore_path + + # It doesn't really matter what directory it 
is, temporary or not. + # Anyway, we really need an existing directory to allow sync utilities to be run + - name: Restore {{ item.part }} | Make sure download path exists + ansible.builtin.file: + path: "{{ _node_backup_dl_path }}" + state: directory + owner: "{{ node_user }}" + group: "{{ node_user }}" + mode: "0755" + + - name: Restore {{ item.part }} | Check free space in '_node_data_root_path' + ansible.builtin.set_fact: + _node_restore_free_space: "{{ _node_run_check_size_mounts[-1]['size_available'] }}" + when: _node_run_check_size + + - name: Restore {{ item.part }} | Print free space in '_node_data_root_path' + ansible.builtin.debug: + msg: Free space at destination = {{ _node_restore_free_space | filesizeformat(true) }} + when: _node_run_check_size + + - name: Restore {{ item.part }} | Tar restoring + ansible.builtin.include_tasks: + file: 801-restore-chain-tar.yml + apply: + tags: [node, node-restore-chain] + when: item.restoring_type == 'tar' + + - name: Restore {{ item.part }} | HTTP restoring + ansible.builtin.include_tasks: + file: 803-restore-chain-http.yml + apply: + tags: [node, node-restore-chain] + when: item.restoring_type == 'http' + + - name: Restore {{ item.part }} | Check if data_root folder already exists + ansible.builtin.stat: + path: "{{ _node_data_root_path }}" + get_checksum: false + register: _node_data_root_path_stat + when: ansible_check_mode + + - name: Restore {{ item.part }} | Recursively change ownership of the '_node_data_root_path' directory + ansible.builtin.file: + path: "{{ _node_data_root_path }}" + state: directory + recurse: true + owner: "{{ node_user }}" + group: "{{ node_user }}" + ignore_errors: "{{ ansible_check_mode and not _node_data_root_path_stat.stat.exists }}" diff --git a/roles/node/tasks/801-restore-chain-tar.yml b/roles/node/tasks/801-restore-chain-tar.yml index d5eeffc..c805b95 100644 --- a/roles/node/tasks/801-restore-chain-tar.yml +++ b/roles/node/tasks/801-restore-chain-tar.yml @@ -1,5 +1,4 @@ --- - - 
name: Restore {{ item.part }} | Tar restoring | Delete temp download folder ansible.builtin.file: path: "{{ item.chain_path }}/tmp" @@ -13,14 +12,14 @@ state: directory owner: "{{ node_user }}" group: "{{ node_user }}" - mode: '0755' + mode: "0755" changed_when: false ignore_errors: "{{ not _node_data_chain_path_stat.stat.exists }}" - name: Restore {{ item.part }} | Tar restoring | Check if temp download folder already exists ansible.builtin.stat: path: "{{ item.chain_path }}/tmp" - get_checksum: False + get_checksum: false register: _node_data_chain_tmp_path_stat - name: Restore {{ item.part }} | Tar restoring | Download chain backup @@ -30,19 +29,19 @@ tmp_dest: "{{ item.chain_path }}/tmp" owner: "{{ node_user }}" group: "{{ node_user }}" - mode: '0644' + mode: "0644" timeout: 900 ignore_errors: "{{ not _node_data_chain_path_stat.stat.exists or not _node_data_chain_tmp_path_stat.stat.exists }}" - name: Restore {{ item.part }} | Tar restoring | Check if backup file already exists ansible.builtin.stat: path: "{{ item.chain_path }}/db.tar" - get_checksum: False + get_checksum: false register: _node_data_chain_backup_file_stat - name: Restore {{ item.part }} | Tar restoring | Extract chain backup ansible.builtin.unarchive: - copy: no + copy: false src: "{{ item.chain_path }}/db.tar" dest: "{{ item.chain_path }}/{{ item.db_folder }}" owner: "{{ node_user }}" diff --git a/roles/node/tasks/803-restore-chain-http.yml b/roles/node/tasks/803-restore-chain-http.yml index a426809..2da3a37 100644 --- a/roles/node/tasks/803-restore-chain-http.yml +++ b/roles/node/tasks/803-restore-chain-http.yml @@ -1,6 +1,4 @@ --- - - - name: Restore {{ item.part }} | HTTP restoring | Install rclone ansible.builtin.apt: deb: "{{ _node_chain_backup_http_rclone_deb }}" @@ -9,12 +7,11 @@ - name: Restore {{ item.part }} | HTTP restoring | Check last version ansible.builtin.uri: url: "{{ item.http_url | trim | trim('/') }}/latest_version.meta.txt" - method: "GET" - return_content: yes + method: 
GET + return_content: true use_proxy: false register: _node_chain_backup_last_version_register - until: _node_chain_backup_last_version_register.status is defined and - _node_chain_backup_last_version_register.status == 200 + until: _node_chain_backup_last_version_register.status is defined and _node_chain_backup_last_version_register.status == 200 retries: 3 delay: 10 check_mode: false @@ -23,13 +20,12 @@ - name: Restore {{ item.part }} | HTTP restoring | Setup _node_chain_backup_http_full_url 1 ansible.builtin.set_fact: - _node_chain_backup_http_full_url: "{% if item.custom_http_url == '' %} - {{ item.http_url }}/{{ _node_chain_backup_last_version_register.content }} - {% else %}{{ item.custom_http_url }}{% endif %}" + _node_chain_backup_http_full_url: "{% if item.custom_http_url == '' %} {{ item.http_url }}/{{ _node_chain_backup_last_version_register.content }} {% else %}{{ + item.custom_http_url }}{% endif %}" - name: Restore {{ item.part }} | HTTP restoring | Setup _node_chain_backup_http_full_url 2 ansible.builtin.set_fact: - _node_chain_backup_http_full_url: "{{ _node_chain_backup_http_full_url | regex_replace('[\\s]+','') | trim('/') }}" + _node_chain_backup_http_full_url: "{{ _node_chain_backup_http_full_url | regex_replace('[\\s]+', '') | trim('/') }}" - name: Restore {{ item.part }} | HTTP restoring | Print backup url ansible.builtin.debug: @@ -42,12 +38,11 @@ - name: Restore {{ item.part }} | HTTP restoring | Check the size of the backup ansible.builtin.uri: url: "{{ _node_chain_backup_http_full_url }}.meta.txt" - method: "GET" - return_content: yes + method: GET + return_content: true use_proxy: false register: _node_chain_restore_backup_size_register - until: _node_chain_restore_backup_size_register.status is defined and - _node_chain_restore_backup_size_register.status == 200 + until: _node_chain_restore_backup_size_register.status is defined and _node_chain_restore_backup_size_register.status == 200 retries: 3 delay: 10 check_mode: false @@ -56,7 
+51,7 @@ - name: Restore {{ item.part }} | HTTP restoring | Print backup size ansible.builtin.debug: - msg: "Backup size = {{ (_node_chain_restore_backup_size_register.content | from_yaml).size | int | filesizeformat(true) }}" + msg: Backup size = {{ (_node_chain_restore_backup_size_register.content | from_yaml).size | int | filesizeformat(true) }} when: _node_run_check_size - name: Restore {{ item.part }} | HTTP restoring | Fail if free space <500MB @@ -74,9 +69,9 @@ ansible.builtin.get_url: url: "{{ _node_chain_backup_http_full_url }}/files.txt" dest: "{{ _node_temp_dir.path }}/{{ item.part }}-files.txt" - mode: 0755 - owner: "root" - group: "root" + mode: "0755" + owner: root + group: root timeout: 30 check_mode: false changed_when: false @@ -99,22 +94,21 @@ notify: restart service {{ node_handler_id }} - name: Restore {{ item.part }} | HTTP restoring | Manage node_chain_backup_tmp_restore_path - block: - - - name: Restore {{ item.part }} | GCP restoring | Stop service and cleanup DB - ansible.builtin.include_tasks: includes/_delete_db_folder.yml - - - name: Restore {{ item.part }} | HTTP restoring | Copy backup from temporary folder - ansible.builtin.copy: - src: "{{ node_chain_backup_tmp_restore_path }}/" - dest: "{{ item.chain_path }}/{{ item.db_folder }}" - owner: "{{ node_user }}" - group: "{{ node_user }}" - remote_src: true - - - name: Restore {{ item.part }} | HTTP restoring | Delete temporary folder - ansible.builtin.file: - path: "{{ node_chain_backup_tmp_restore_path }}" - state: absent - when: _node_use_tmp_restore_path + block: + - name: Restore {{ item.part }} | GCP restoring | Stop service and cleanup DB + ansible.builtin.include_tasks: includes/_delete_db_folder.yml + + - name: Restore {{ item.part }} | HTTP restoring | Copy backup from temporary folder + ansible.builtin.copy: + src: "{{ node_chain_backup_tmp_restore_path }}/" + dest: "{{ item.chain_path }}/{{ item.db_folder }}" + owner: "{{ node_user }}" + group: "{{ node_user }}" + remote_src: 
true + mode: "0755" + + - name: Restore {{ item.part }} | HTTP restoring | Delete temporary folder + ansible.builtin.file: + path: "{{ node_chain_backup_tmp_restore_path }}" + state: absent diff --git a/roles/node/tasks/900-systemd.yml b/roles/node/tasks/900-systemd.yml index 94b5f66..17232f8 100644 --- a/roles/node/tasks/900-systemd.yml +++ b/roles/node/tasks/900-systemd.yml @@ -1,5 +1,4 @@ --- - - name: Systemd | Get public IP community.general.ipify_facts: timeout: 30 @@ -13,7 +12,7 @@ ansible.builtin.file: path: "{{ item }}" state: directory - mode: '0755' + mode: "0755" owner: "{{ node_user }}" group: "{{ node_user }}" loop: @@ -21,22 +20,22 @@ - name: Systemd | Copy {{ node_app_name }} systemd unit file ansible.builtin.template: - src: "node.service.j2" + src: node.service.j2 dest: "{{ _node_unit_file }}" - owner: "root" - group: "root" + owner: root + group: root mode: "0600" notify: restart service {{ node_handler_id }} - name: Systemd | Copy {{ node_app_name }} environment variable file ansible.builtin.template: - src: "env.j2" + src: env.j2 owner: "{{ node_user }}" group: "{{ node_user }}" - dest: "/etc/default/polkadot-{{ node_app_name }}" - mode: 0644 + dest: /etc/default/polkadot-{{ node_app_name }} + mode: "0644" notify: restart service {{ node_handler_id }} - tags: ['node-memory-profiler'] + tags: [node-memory-profiler] # to avoid 2 restarts during the first deploy - name: Systemd | Flush handlers @@ -46,7 +45,7 @@ ansible.builtin.systemd: name: "{{ node_app_name }}" state: "{{ 'started' if (node_start_service | bool) else 'stopped' }}" - enabled: yes - daemon_reload: yes + enabled: true + daemon_reload: true notify: health check {{ node_handler_id }} ignore_errors: "{{ not _node_systemd_unit_file_stat.stat.exists }}" diff --git a/roles/node/tasks/includes/_delete_db_folder.yml b/roles/node/tasks/includes/_delete_db_folder.yml index 76d5c87..318c045 100644 --- a/roles/node/tasks/includes/_delete_db_folder.yml +++ 
b/roles/node/tasks/includes/_delete_db_folder.yml @@ -1,17 +1,18 @@ --- - - name: Restore {{ item.part }} | Stop the service - ansible.builtin.systemd: - name: "{{ node_app_name }}" - state: stopped - notify: restart service {{ node_handler_id }} - ignore_errors: "{{ not _node_systemd_unit_file_stat.stat.exists }}" +- name: Restore {{ item.part }} | Stop the service + ansible.builtin.systemd: + name: "{{ node_app_name }}" + state: stopped + notify: restart service {{ node_handler_id }} + ignore_errors: "{{ not _node_systemd_unit_file_stat.stat.exists }}" - - name: Restore {{ item.part }} | Delete db folder - ansible.builtin.file: - path: "{{ item.chain_path }}/{{ item.db_folder }}" - state: absent +- name: Restore {{ item.part }} | Delete db folder + ansible.builtin.file: + path: "{{ item.chain_path }}/{{ item.db_folder }}" + state: absent - - name: Restore {{ item.part }} | Recreate db folder - ansible.builtin.file: - path: "{{ item.chain_path }}/{{ item.db_folder }}" - state: directory \ No newline at end of file +- name: Restore {{ item.part }} | Recreate db folder + ansible.builtin.file: + path: "{{ item.chain_path }}/{{ item.db_folder }}" + state: directory + mode: "0655" diff --git a/roles/node/tasks/main.yml b/roles/node/tasks/main.yml index b7822ed..829c507 100644 --- a/roles/node/tasks/main.yml +++ b/roles/node/tasks/main.yml @@ -1,104 +1,153 @@ --- - - name: node | Tests ansible.builtin.include_tasks: file: 100-tests.yml apply: - tags: ['node', 'node-tests'] - tags: ['node', 'node-tests'] + tags: [node, node-tests] + tags: [node, node-tests] - name: node | Check the systemd unit file exists ansible.builtin.stat: path: "{{ _node_unit_file }}" register: _node_systemd_unit_file_stat - tags: ['node', 'node-wipe', 'node-health-check', 'node-binary', 'node-memory-profiler', 'node-chain', 'node-restore-chain', 'node-systemd', 'node-restart', 'node-post-tasks'] + tags: + - node + - node-wipe + - node-health-check + - node-binary + - node-memory-profiler + - 
node-chain + - node-restore-chain + - node-systemd + - node-restart + - node-post-tasks - name: node | Prepare ansible.builtin.include_tasks: file: 200-prepare.yml apply: - tags: ['node', 'node-prepare', 'node-health-check', 'node-binary', 'node-memory-profiler', 'node-chain', 'node-restore-chain', 'node-systemd', 'node-restart', 'node-post-tasks'] - tags: ['node', 'node-prepare', 'node-health-check', 'node-binary', 'node-memory-profiler', 'node-chain', 'node-restore-chain', 'node-systemd', 'node-restart', 'node-post-tasks'] + tags: + - node + - node-prepare + - node-health-check + - node-binary + - node-memory-profiler + - node-chain + - node-restore-chain + - node-systemd + - node-restart + - node-post-tasks + tags: + - node + - node-prepare + - node-health-check + - node-binary + - node-memory-profiler + - node-chain + - node-restore-chain + - node-systemd + - node-restart + - node-post-tasks - name: node | Wipe ansible.builtin.include_tasks: file: 300-wipe.yml apply: - tags: ['node', 'node-wipe'] + tags: [node, node-wipe] when: node_database_wipe | bool or node_parachain_database_wipe | bool - tags: ['node', 'node-wipe'] + tags: [node, node-wipe] # It will only be run if the systemd service state is 'running'. # It should help to avoid the situation when we try to update a broken node. 
- name: node | Check the node state before deploying ansible.builtin.include_tasks: - file: 001-health-check.yml + file: "001-health-check.yml" apply: - tags: ['node', 'node-health-check'] + tags: [node, node-health-check] vars: _node_pre_check: true - tags: ['node', 'node-health-check', 'node-restore-chain'] + tags: [node, node-health-check, node-restore-chain] - name: node | Binary ansible.builtin.include_tasks: file: 400-binary.yml apply: - tags: ['node', 'node-binary'] + tags: [node, node-binary] when: node_binary_deployment | bool - tags: ['node', 'node-binary'] + tags: [node, node-binary] - name: node | Memory profiler ansible.builtin.include_tasks: file: 500-memory-profiler.yml apply: - tags: ['node', 'node-memory-profiler'] + tags: [node, node-memory-profiler] when: node_memory_profiler_enable | bool - tags: ['node', 'node-memory-profiler'] + tags: [node, node-memory-profiler] - name: node | Chain ansible.builtin.include_tasks: file: 600-chain.yml apply: - tags: ['node', 'node-chain'] + tags: [node, node-chain] when: node_chain_deployment | bool - tags: ['node', 'node-chain'] + tags: [node, node-chain] - name: node | Get chain IDs ansible.builtin.include_tasks: file: 700-get-chainid.yml apply: - tags: ['node', 'node-restore-chain'] + tags: [node, node-restore-chain] when: _node_restore_relaychain or _node_restore_parachain - tags: ['node', 'node-restore-chain'] + tags: [node, node-restore-chain] - name: node | Restore chain ansible.builtin.include_tasks: file: 800-restore-chain.yml apply: - tags: ['node', 'node-restore-chain'] + tags: [node, node-restore-chain] loop: "{{ _node_restore_list }}" when: _node_restore_relaychain or _node_restore_parachain - tags: ['node', 'node-restore-chain'] + tags: [node, node-restore-chain] - name: node | Systemd ansible.builtin.include_tasks: file: 900-systemd.yml apply: - tags: ['node', 'node-systemd'] + tags: [node, node-systemd] when: node_systemd_deployment | bool - tags: ['node', 'node-systemd'] + tags: [node, 
node-systemd] - name: node | Restart ansible.builtin.include_tasks: - file: 002-restart.yml + file: "002-restart.yml" apply: - tags: ['node', 'node-restart'] + tags: [node, node-restart] when: node_start_service | bool and node_force_restart | bool - tags: ['node', 'node-restart'] + tags: [node, node-restart] - name: node | Post tasks ansible.builtin.include_tasks: file: 1000-post-tasks.yml apply: - tags: ['node', 'node-prepare', 'node-health-check', 'node-binary', 'node-memory-profiler', 'node-chain', 'node-restore-chain', 'node-systemd', 'node-restart', 'node-post-tasks'] - tags: ['node', 'node-prepare', 'node-health-check', 'node-binary', 'node-memory-profiler', 'node-chain', 'node-restore-chain', 'node-systemd', 'node-restart', 'node-post-tasks'] \ No newline at end of file + tags: + - node + - node-prepare + - node-health-check + - node-binary + - node-memory-profiler + - node-chain + - node-restore-chain + - node-systemd + - node-restart + - node-post-tasks + tags: + - node + - node-prepare + - node-health-check + - node-binary + - node-memory-profiler + - node-chain + - node-restore-chain + - node-systemd + - node-restart + - node-post-tasks diff --git a/roles/node/vars/main.yml b/roles/node/vars/main.yml index e832993..50df9dc 100644 --- a/roles/node/vars/main.yml +++ b/roles/node/vars/main.yml @@ -5,63 +5,48 @@ ##################################################################################### # https://semver.org/ -_node_semver_regex: '^.*((?:0|(?:[1-9]\d*))\.(?:0|(?:[1-9]\d*))\.(?:0|(?:[1-9]\d*))(?:-[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?(?:\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?).*$' +_node_semver_regex: ^.*((?:0|(?:[1-9]\d*))\.(?:0|(?:[1-9]\d*))\.(?:0|(?:[1-9]\d*))(?:-[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?(?:\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?).*$ _node_binary_version_from_url: "{{ (node_binary | regex_search(_node_semver_regex, '\\1'))[0] }}" # Name of the binary or absolute path to it _node_binary_gpg_binary: gpg -_node_data_root_path: "{% if node_data_root_path 
!= '' -%} - {{ node_data_root_path }} - {%- else -%} - {{ _node_user_home_path }}/.local/share/polkadot - {%- endif %}" -_node_memory_profiler_log_path: "{% if node_memory_profiler_log_path != '' -%} - {{ node_memory_profiler_log_path }} - {%- else -%} - {{ _node_user_home_path }}/logs - {%- endif %}" +_node_data_root_path: "{% if node_data_root_path != '' -%} {{ node_data_root_path }} {%- else -%} {{ _node_user_home_path }}/.local/share/polkadot {%- endif %}" +_node_memory_profiler_log_path: "{% if node_memory_profiler_log_path != '' -%} {{ node_memory_profiler_log_path }} {%- else -%} {{ _node_user_home_path }}/logs {%- + endif %}" _node_binary_path: "{{ _node_user_home_path }}/bin/{{ node_app_name }}" _node_main_binary_file_name: node _node_memory_profiler_binary_file: "{{ _node_binary_path }}/libbytehound.so" _node_wasm_runtime_base_path: "{{ _node_user_home_path }}/wasm_runtime/{{ node_app_name }}" -_node_unit_file: "/etc/systemd/system/{{ node_app_name }}.service" +_node_unit_file: /etc/systemd/system/{{ node_app_name }}.service -_node_chain_backup_http_rclone_deb: "https://downloads.rclone.org/v1.63.1/rclone-v1.63.1-linux-amd64.deb" +_node_chain_backup_http_rclone_deb: https://downloads.rclone.org/v1.63.1/rclone-v1.63.1-linux-amd64.deb _node_profiles: validator: in_peers: "25" out_peers: "25" - memory_high: "7900M" - memory_max: "8000M" + memory_high: 7900M + memory_max: 8000M boot: in_peers: "25" out_peers: "25" - memory_high: "10400M" - memory_max: "10500M" + memory_high: 10400M + memory_max: 10500M rpc: in_peers: "25" out_peers: "25" - memory_high: "5400M" - memory_max: "5500M" + memory_high: 5400M + memory_max: 5500M full: in_peers: "25" out_peers: "25" - memory_high: "5400M" - memory_max: "5500M" - -_node_restore_list: " - {%- if _node_restore_relaychain and not _node_restore_parachain -%} - {{ [_node_chain_backup_data] }} - {%- elif not _node_restore_relaychain and _node_restore_parachain -%} - {{ [_node_parachain_chain_backup_data] }} - {%- elif 
_node_restore_relaychain and _node_restore_parachain -%} - {{ [_node_chain_backup_data, _node_parachain_chain_backup_data] }} - {%- else -%} - {{ [] }} - {%- endif %}" + memory_high: 5400M + memory_max: 5500M +_node_restore_list: " {%- if _node_restore_relaychain and not _node_restore_parachain -%} {{ [_node_chain_backup_data] }} {%- elif not _node_restore_relaychain and + _node_restore_parachain -%} {{ [_node_parachain_chain_backup_data] }} {%- elif _node_restore_relaychain and _node_restore_parachain -%} {{ [_node_chain_backup_data, + _node_parachain_chain_backup_data] }} {%- else -%} {{ [] }} {%- endif %}" ##################################################################################### # Relaychain @@ -71,23 +56,18 @@ _node_data_chain_path: "{{ _node_data_root_path }}{% if node_parachain_role != ' _node_p2p_key_file: "{{ _node_user_home_path }}/keys/{{ node_app_name }}_relaychain_p2p_key" _node_chainspec_file: "{{ _node_user_home_path }}/chainspecs/{{ node_app_name }}_relaychain_chainspec.json" -_node_restore_relaychain: "{{ - (node_chain_backup_restoring_type == 'http' and ( node_chain_backup_http_base_url != '' or node_chain_backup_http_url != '')) or - (node_chain_backup_restoring_type == 'tar' and node_chain_backup_url != '') - }}" +_node_restore_relaychain: "{{ (node_chain_backup_restoring_type == 'http' and ( node_chain_backup_http_base_url != '' or node_chain_backup_http_url != '')) or (node_chain_backup_restoring_type + == 'tar' and node_chain_backup_url != '') }}" _node_chain_backup_data: part: relaychain restoring_type: "{{ node_chain_backup_restoring_type }}" chain_path: "{{ _node_data_chain_path }}" db_folder: "{{ 'paritydb' if node_paritydb_enable else 'db' }}" tar_url: "{{ node_chain_backup_url }}" - http_url: "{{ node_chain_backup_http_base_url + '/' + - node_chain + - ('-paritydb' if node_paritydb_enable else '-rocksdb') + ('-prune' if node_pruning > 0 else '-archive') }}" + http_url: "{{ node_chain_backup_http_base_url + '/' + node_chain + 
('-paritydb' if node_paritydb_enable else '-rocksdb') + ('-prune' if node_pruning > 0 else '-archive') + }}" custom_http_url: "{{ node_chain_backup_http_url }}" - - ##################################################################################### # Parachain ##################################################################################### @@ -96,17 +76,14 @@ _node_parachain_data_chain_path: "{{ _node_data_root_path }}/chains/{{ _node_par _node_parachain_p2p_key_file: "{{ _node_user_home_path }}/keys/{{ node_app_name }}_parachain_p2p_key" _node_parachain_chainspec_file: "{{ _node_user_home_path }}/chainspecs/{{ node_app_name }}_parachain_chainspec.json" -_node_restore_parachain: "{{ node_parachain_role != '' and - ((node_parachain_chain_backup_restoring_type == 'http' and (node_parachain_chain_backup_http_base_url != '' or node_parachain_chain_backup_http_url != '' )) or - (node_parachain_chain_backup_restoring_type == 'tar' and node_parachain_chain_backup_url != '')) - }}" +_node_restore_parachain: "{{ node_parachain_role != '' and ((node_parachain_chain_backup_restoring_type == 'http' and (node_parachain_chain_backup_http_base_url != + '' or node_parachain_chain_backup_http_url != '' )) or (node_parachain_chain_backup_restoring_type == 'tar' and node_parachain_chain_backup_url != '')) }}" _node_parachain_chain_backup_data: part: parachain restoring_type: "{{ node_parachain_chain_backup_restoring_type }}" chain_path: "{{ _node_parachain_data_chain_path }}" db_folder: "{{ 'paritydb' if node_parachain_paritydb_enable else 'db' }}" tar_url: "{{ node_parachain_chain_backup_url }}" - http_url: "{{ node_parachain_chain_backup_http_base_url + '/' + - node_parachain_chain + - ('-paritydb' if node_parachain_paritydb_enable else '-rocksdb') + ('-prune' if node_parachain_pruning > 0 else '-archive') }}" + http_url: "{{ node_parachain_chain_backup_http_base_url + '/' + node_parachain_chain + ('-paritydb' if node_parachain_paritydb_enable else '-rocksdb') + ('-prune' + 
if node_parachain_pruning > 0 else '-archive') }}" custom_http_url: "{{ node_parachain_chain_backup_http_url }}" diff --git a/roles/node_backup/.ansible-lint b/roles/node_backup/.ansible-lint new file mode 100644 index 0000000..c175a9a --- /dev/null +++ b/roles/node_backup/.ansible-lint @@ -0,0 +1,3 @@ +--- +skip_list: + - name[casing] diff --git a/roles/node_backup/defaults/main.yml b/roles/node_backup/defaults/main.yml index d04cd27..e0c9108 100644 --- a/roles/node_backup/defaults/main.yml +++ b/roles/node_backup/defaults/main.yml @@ -11,10 +11,10 @@ node_backup_max_concurrent_requests: 50 node_backup_schedule: - "*-*-* 01:00:00" -node_backup_user: "polkadot" +node_backup_user: polkadot -node_backup_base_path: "/opt/node_backup" -node_backup_tmp_path: "/tmp" +node_backup_base_path: /opt/node_backup +node_backup_tmp_path: /tmp # It wipes a local cash of the node-bakcup expoter. # It's useful if you rename or remove some backups from the 'node_backup_targets' variable @@ -37,4 +37,4 @@ node_backup_targets: [] # bucket_name: "backup" # # the public domain name of the bucket # # it's empty by default -# bucket_domain: "backup.polkadot.io" \ No newline at end of file +# bucket_domain: "backup.polkadot.io" diff --git a/roles/node_backup/handlers/main.yml b/roles/node_backup/handlers/main.yml index 129b520..198a201 100644 --- a/roles/node_backup/handlers/main.yml +++ b/roles/node_backup/handlers/main.yml @@ -1,15 +1,14 @@ --- - - name: restart node-backup exporter ansible.builtin.systemd: - name: "node-backup-exporter" + name: node-backup-exporter state: restarted enabled: true daemon_reload: true - name: restart node-backup timer ansible.builtin.systemd: - name: "node-backup.timer" + name: node-backup.timer state: restarted enabled: true - daemon_reload: true \ No newline at end of file + daemon_reload: true diff --git a/roles/node_backup/molecule/default/collections.yml b/roles/node_backup/molecule/default/collections.yml index 88bc7f3..00da5fa 100644 --- 
a/roles/node_backup/molecule/default/collections.yml +++ b/roles/node_backup/molecule/default/collections.yml @@ -1,4 +1,5 @@ +--- collections: - name: https://github.com/paritytech/ansible-galaxy.git type: git - version: main \ No newline at end of file + version: main diff --git a/roles/node_backup/molecule/default/converge.yml b/roles/node_backup/molecule/default/converge.yml index e860493..e951dcf 100644 --- a/roles/node_backup/molecule/default/converge.yml +++ b/roles/node_backup/molecule/default/converge.yml @@ -2,6 +2,6 @@ - name: Converge hosts: all tasks: - - name: "Include node backup" + - name: Include node backup ansible.builtin.include_role: - name: "node_backup" + name: node_backup diff --git a/roles/node_backup/molecule/default/group_vars/all.yml b/roles/node_backup/molecule/default/group_vars/all.yml index 578dd8b..016c908 100644 --- a/roles/node_backup/molecule/default/group_vars/all.yml +++ b/roles/node_backup/molecule/default/group_vars/all.yml @@ -1,35 +1,36 @@ +--- ## Molecule ansible_user: root ## prepare.yml #node_legacy_rpc_flags: false -node_binary: "https://github.com/paritytech/polkadot/releases/download/v0.9.42/polkadot" -node_chain: "rococo-local" -node_data_root_path: "/opt/{{node_app_name}}" -node_chain_backup_restoring_type: "none" +node_binary: https://github.com/paritytech/polkadot/releases/download/v0.9.42/polkadot +node_chain: rococo-local +node_data_root_path: /opt/{{ node_app_name }} +node_chain_backup_restoring_type: none node_pruning: 256 # node_binary_deployment: false # node_backup _gcp_bucket: test-blockstore-backups -node_backup_user: "parity" -node_backup_r2_access_key_id: "abc" -node_backup_r2_secret_access_key: "cba" -node_backup_r2_api_url: "https://a.b" +node_backup_user: parity +node_backup_r2_access_key_id: abc +node_backup_r2_secret_access_key: cba +node_backup_r2_api_url: https://a.b node_backup_targets: - - service_name: rococo-alice-rocksdb-prune - local_path: 
/opt/rococo-alice-rocksdb-prune/chains/rococo_local_testnet/db - rpc_port: 9933 - bucket_name: "{{ _gcp_bucket }}" - type: "gcp-native" - - service_name: rococo-bob-paritydb-prune - local_path: /opt/rococo-bob-paritydb-prune/chains/rococo_local_testnet/paritydb - rpc_port: 9934 - bucket_name: "{{ _gcp_bucket }}" - type: "gcp-rclone" - - service_name: rococo-bob-paritydb-prune - local_path: /opt/rococo-bob-paritydb-prune/chains/rococo_local_testnet/paritydb - rpc_port: 9934 - bucket_name: "{{ _gcp_bucket }}" - type: "r2-rclone" - bucket_domain: "c.d" \ No newline at end of file + - service_name: rococo-alice-rocksdb-prune + local_path: /opt/rococo-alice-rocksdb-prune/chains/rococo_local_testnet/db + rpc_port: 9933 + bucket_name: "{{ _gcp_bucket }}" + type: gcp-native + - service_name: rococo-bob-paritydb-prune + local_path: /opt/rococo-bob-paritydb-prune/chains/rococo_local_testnet/paritydb + rpc_port: 9934 + bucket_name: "{{ _gcp_bucket }}" + type: gcp-rclone + - service_name: rococo-bob-paritydb-prune + local_path: /opt/rococo-bob-paritydb-prune/chains/rococo_local_testnet/paritydb + rpc_port: 9934 + bucket_name: "{{ _gcp_bucket }}" + type: r2-rclone + bucket_domain: c.d diff --git a/roles/node_backup/molecule/default/molecule.yml b/roles/node_backup/molecule/default/molecule.yml index 04195d1..2dcf72f 100644 --- a/roles/node_backup/molecule/default/molecule.yml +++ b/roles/node_backup/molecule/default/molecule.yml @@ -9,7 +9,7 @@ platforms: source: alias: debian/bullseye/amd64 # DOCKER - image: "paritytech/debian11:latest" + image: paritytech/debian11:latest command: ${MOLECULE_DOCKER_COMMAND:-""} privileged: true pre_build_image: true @@ -17,11 +17,11 @@ platforms: provisioner: name: ansible options: - diff: True + diff: true config_options: defaults: callbacks_enabled: timer verifier: name: ansible options: - diff: True + diff: true diff --git a/roles/node_backup/molecule/default/prepare.yml b/roles/node_backup/molecule/default/prepare.yml index 
15e45c7..f8c8ac2 100644 --- a/roles/node_backup/molecule/default/prepare.yml +++ b/roles/node_backup/molecule/default/prepare.yml @@ -1,3 +1,4 @@ +--- - name: Prepare hosts: all gather_facts: false @@ -9,23 +10,23 @@ ansible.builtin.user: name: parity tasks: - - name: "rococo-alice local" + - name: rococo-alice local ansible.builtin.include_role: name: parity.chain.node vars: node_rpc_port: 9933 - node_app_name: "rococo-alice-rocksdb-prune" + node_app_name: rococo-alice-rocksdb-prune node_custom_options: - - "--alice" - - name: "rococo-bob local" + - --alice + - name: rococo-bob local ansible.builtin.include_role: name: parity.chain.node vars: node_rpc_port: 9934 node_paritydb_enable: true - node_app_name: "rococo-bob-paritydb-prune" + node_app_name: rococo-bob-paritydb-prune node_custom_options: - - "--bob" + - --bob - name: Pretend we are in gcp | Install cron, gnupg ansible.builtin.package: name: diff --git a/roles/node_backup/molecule/default/verify.yml b/roles/node_backup/molecule/default/verify.yml index 8161fd5..10846bb 100644 --- a/roles/node_backup/molecule/default/verify.yml +++ b/roles/node_backup/molecule/default/verify.yml @@ -3,25 +3,25 @@ hosts: all gather_facts: false tasks: - - name: wait until ~10 blocks created - ansible.builtin.uri: - url: "http://127.0.0.1:9933" - method: "POST" - body_format: "json" - body: - id: 1 - jsonrpc: "2.0" - method: "chain_getHeader" - params: [] - return_content: true - register: _node_backup_register_header - until: _node_backup_register_header.json.result.number | int(base=16) > 10 - retries: 10 - delay: 10 + - name: wait until ~10 blocks created + ansible.builtin.uri: + url: http://127.0.0.1:9933 + method: POST + body_format: json + body: + id: 1 + jsonrpc: "2.0" + method: chain_getHeader + params: [] + return_content: true + register: _node_backup_register_header + until: _node_backup_register_header.json.result.number | int(base=16) > 10 + retries: 10 + delay: 10 - - name: Print current block - 
ansible.builtin.debug: - var: _node_backup_register_header.json.result.number | int(base=16) + - name: Print current block + ansible.builtin.debug: + var: _node_backup_register_header.json.result.number | int(base=16) # # todo add tests # ## a) upload to gcp diff --git a/roles/node_backup/tasks/exporter.yml b/roles/node_backup/tasks/exporter.yml index 8e1e659..e6637d9 100644 --- a/roles/node_backup/tasks/exporter.yml +++ b/roles/node_backup/tasks/exporter.yml @@ -1,5 +1,4 @@ --- - - name: node-backup | exporter | remove the cache file ansible.builtin.file: path: "{{ _node_backup_exporter_cache_file }}" @@ -9,29 +8,29 @@ - name: node-backup | exporter | copy exporter file ansible.builtin.copy: - src: "exporter.py" + src: exporter.py dest: "{{ _node_backup_exporter_file }}" - mode: 0755 + mode: "0755" owner: "{{ node_backup_user }}" group: "{{ node_backup_user }}" notify: restart node-backup exporter - name: node-backup | exporter | copy exporter systemd unit file ansible.builtin.template: - src: "node-backup-exporter.service.j2" - dest: "/etc/systemd/system/node-backup-exporter.service" - owner: "root" - group: "root" + src: node-backup-exporter.service.j2 + dest: /etc/systemd/system/node-backup-exporter.service + owner: root + group: root mode: "0644" notify: restart node-backup exporter - # to avoid 2 restarts during the first deploy +# to avoid 2 restarts during the first deploy - name: node-backup | exporter | flush handlers ansible.builtin.meta: flush_handlers - name: node-backup | exporter | start exporter service ansible.builtin.systemd: - name: "node-backup-exporter" + name: node-backup-exporter state: started enabled: true daemon_reload: true diff --git a/roles/node_backup/tasks/job.yml b/roles/node_backup/tasks/job.yml index f0379e4..ad9dd70 100644 --- a/roles/node_backup/tasks/job.yml +++ b/roles/node_backup/tasks/job.yml @@ -1,51 +1,49 @@ --- - - name: node-backup | job | set _node_backup_targets variable 1 ansible.builtin.set_fact: _node_backup_targets: 
[] - name: node-backup | job | set _node_backup_targets variable 2 ansible.builtin.set_fact: - _node_backup_targets: "{{ _node_backup_targets + - [ item | combine({'id': _node_backup_id}, recursive=True) ] }}" + _node_backup_targets: "{{ _node_backup_targets + [item | combine({'id': _node_backup_id}, recursive=True)] }}" vars: - _node_backup_id: "{{ (_node_backup_storages[item.type] + '-' + item.bucket_name + '-' + item.service_name) | regex_replace('[^0-9a-zA-Z]+', '-') }}" + _node_backup_id: "{{ (_node_backup_storages[item.type] + '-' + item.bucket_name + '-' + item.service_name) | regex_replace('[^0-9a-zA-Z]+', '-') }}" loop: "{{ node_backup_targets }}" - name: node-backup | job | copy single backup scripts ansible.builtin.template: - src: "single-backup.sh.j2" + src: single-backup.sh.j2 dest: "{{ _node_backup_scripts_path }}/{{ item.id }}.sh" - mode: 0755 - owner: "root" - group: "root" + mode: "0755" + owner: root + group: root loop: "{{ _node_backup_targets }}" - tags: ['node-backup-test'] + tags: [node-backup-test] - name: node-backup | job | copy common backup script ansible.builtin.template: - src: "common-backup.sh.j2" + src: common-backup.sh.j2 dest: "{{ _node_backup_scripts_path }}/common.sh" - mode: 0755 - owner: "root" - group: "root" - tags: ['node-backup-test'] + mode: "0755" + owner: root + group: root + tags: [node-backup-test] - name: node-backup | job | copy backup systemd unit files ansible.builtin.template: src: "{{ item }}.j2" - dest: "/etc/systemd/system/{{ item }}" - owner: "root" - group: "root" + dest: /etc/systemd/system/{{ item }} + owner: root + group: root mode: "0644" loop: - - "node-backup.service" - - "node-backup.timer" + - node-backup.service + - node-backup.timer notify: restart node-backup timer - name: node-backup | job | enable timer ansible.builtin.systemd: - name: "node-backup.timer" + name: node-backup.timer state: started enabled: true daemon_reload: true diff --git a/roles/node_backup/tasks/main.yml 
b/roles/node_backup/tasks/main.yml index 38a617d..fa8ef5a 100644 --- a/roles/node_backup/tasks/main.yml +++ b/roles/node_backup/tasks/main.yml @@ -1,11 +1,10 @@ --- - - name: node-backup | tests ansible.builtin.include_tasks: file: tests.yml apply: - tags: ['node-backup', 'node-backup-tests'] - tags: ['node-backup', 'node-backup-tests'] + tags: [node-backup, node-backup-tests] + tags: [node-backup, node-backup-tests] - name: node-backup | create directories ansible.builtin.file: @@ -25,25 +24,25 @@ user: root - path: "{{ _node_backup_venv_path }}" user: root - tags: [ 'node-backup' ] + tags: [node-backup] - name: node-backup | requirements ansible.builtin.include_tasks: file: requirements.yml apply: - tags: [ 'node-backup', 'node-backup-requirements' ] - tags: [ 'node-backup', 'node-backup-requirements' ] + tags: [node-backup, node-backup-requirements] + tags: [node-backup, node-backup-requirements] - name: node-backup | job ansible.builtin.include_tasks: file: job.yml apply: - tags: [ 'node-backup', 'node-backup-job' ] - tags: [ 'node-backup', 'node-backup-job' ] + tags: [node-backup, node-backup-job] + tags: [node-backup, node-backup-job] - name: node-backup | exporter ansible.builtin.include_tasks: file: exporter.yml apply: - tags: [ 'node-backup', 'node-backup-exporter' ] - tags: [ 'node-backup', 'node-backup-exporter' ] + tags: [node-backup, node-backup-exporter] + tags: [node-backup, node-backup-exporter] diff --git a/roles/node_backup/tasks/requirements.yml b/roles/node_backup/tasks/requirements.yml index 9207623..1e93e2f 100644 --- a/roles/node_backup/tasks/requirements.yml +++ b/roles/node_backup/tasks/requirements.yml @@ -1,5 +1,4 @@ --- - - name: node-backup | requirements | install packages ansible.builtin.package: name: "{{ packages }}" @@ -7,42 +6,40 @@ update_cache: true vars: packages: - - "curl" - - "jq" - - "expect" - - "moreutils" - - "python3-venv" - - "python3-setuptools" - + - curl + - jq + - expect + - moreutils + - python3-venv + - 
python3-setuptools - name: node-backup | requirements | install Python modules ansible.builtin.pip: name: - - "prometheus-client==0.17.0" + - prometheus-client==0.17.0 virtualenv: "{{ _node_backup_venv_path }}" - virtualenv_command: "python3 -m venv" + virtualenv_command: python3 -m venv notify: restart node-backup exporter - name: node-backup | requirements | configure rclone + when: node_backup_targets | json_query('[].type') | intersect(_node_backup_rclone_types) | length > 0 block: + - name: node-backup | requirements | install rclone + ansible.builtin.apt: + deb: "{{ _node_backup_rclone_deb }}" - - name: node-backup | requirements | install rclone - ansible.builtin.apt: - deb: "{{ _node_backup_rclone_deb }}" + - name: node backup | requirements | create rclone config directory + ansible.builtin.file: + path: /root/.config/rclone + state: directory + mode: "0700" + owner: root + group: root - - name: node backup | requirements | create rclone config directory - ansible.builtin.file: - path: "/root/.config/rclone" - state: directory - mode: 0700 - owner: "root" - group: "root" - - - name: node-backup | requirements | copy R2 config - ansible.builtin.template: - src: "rclone/rclone.conf.j2" - dest: "/root/.config/rclone/rclone.conf" - owner: "root" - group: "root" - mode: 0600 - when: node_backup_targets | json_query('[].type') | intersect(_node_backup_rclone_types) | length > 0 + - name: node-backup | requirements | copy R2 config + ansible.builtin.template: + src: rclone/rclone.conf.j2 + dest: /root/.config/rclone/rclone.conf + owner: root + group: root + mode: "0600" diff --git a/roles/node_backup/tasks/tests.yml b/roles/node_backup/tasks/tests.yml index 167d119..d828dbb 100644 --- a/roles/node_backup/tasks/tests.yml +++ b/roles/node_backup/tasks/tests.yml @@ -1,26 +1,19 @@ --- - - name: node-backup | test | check R2 configuration ansible.builtin.fail: - msg: "If the R2 backups are used, 'node_backup_r2_access_key_id', 'node_backup_r2_secret_access_key' and 
'node_backup_r2_api_url' variables have to be specified" - when: node_backup_targets | json_query('[].type') | intersect(_node_backup_r2_types) | length > 0 and - ( node_backup_r2_access_key_id == '' or - node_backup_r2_secret_access_key == '' or - node_backup_r2_api_url == '' - ) + msg: If the R2 backups are used, 'node_backup_r2_access_key_id', 'node_backup_r2_secret_access_key' and 'node_backup_r2_api_url' variables have to be specified + when: node_backup_targets | json_query('[].type') | intersect(_node_backup_r2_types) | length > 0 and ( node_backup_r2_access_key_id == '' or node_backup_r2_secret_access_key + == '' or node_backup_r2_api_url == '' ) - name: node-backup | test | check variables ansible.builtin.fail: msg: "'service_name', 'rpc_port', 'type' and 'bucket_name' fields have to be specified for each item in 'node_backup_targets'" - when: item.service_name == '' or - item.rpc_port == '' or - item.type == '' or - item.bucket_name == '' + when: item.service_name == '' or item.rpc_port == '' or item.type == '' or item.bucket_name == '' loop: "{{ node_backup_targets }}" - name: node-backup | test | check R2 backups ansible.builtin.fail: - msg: "the 'bucket_domain' field has to be specified for R2 backups" + msg: the 'bucket_domain' field has to be specified for R2 backups when: item.type in _node_backup_r2_types and item.bucket_domain == '' loop: "{{ node_backup_targets }}" diff --git a/roles/node_backup/vars/main.yml b/roles/node_backup/vars/main.yml index 6c7a881..dadc940 100644 --- a/roles/node_backup/vars/main.yml +++ b/roles/node_backup/vars/main.yml @@ -6,12 +6,12 @@ _node_backup_venv_path: "{{ node_backup_base_path }}/venv" _node_backup_exporter_path: "{{ node_backup_base_path }}/exporter" _node_backup_exporter_file: "{{ _node_backup_exporter_path }}/exporter.py" _node_backup_exporter_cache_file: "{{ _node_backup_exporter_path }}/exporter.cache" -_node_backup_rclone_deb: "https://downloads.rclone.org/v1.63.1/rclone-v1.63.1-linux-amd64.deb" 
+_node_backup_rclone_deb: https://downloads.rclone.org/v1.63.1/rclone-v1.63.1-linux-amd64.deb -_node_backup_r2_types: ["r2-rclone"] -_node_backup_gcp_types: ["gcp-native", "gcp-rclone"] -_node_backup_rclone_types: ["gcp-rclone", "r2-rclone"] +_node_backup_r2_types: [r2-rclone] +_node_backup_gcp_types: [gcp-native, gcp-rclone] +_node_backup_rclone_types: [gcp-rclone, r2-rclone] _node_backup_storages: r2-rclone: r2 gcp-rclone: gcp - gcp-native: gcp \ No newline at end of file + gcp-native: gcp diff --git a/roles/secure_apt/.ansible-lint b/roles/secure_apt/.ansible-lint new file mode 100644 index 0000000..c175a9a --- /dev/null +++ b/roles/secure_apt/.ansible-lint @@ -0,0 +1,3 @@ +--- +skip_list: + - name[casing] diff --git a/roles/secure_apt/defaults/main.yml b/roles/secure_apt/defaults/main.yml index 58e678e..c02fb75 100644 --- a/roles/secure_apt/defaults/main.yml +++ b/roles/secure_apt/defaults/main.yml @@ -4,5 +4,4 @@ secure_apt_keyserver: keyserver.ubuntu.com secure_apt_key: "" secure_apt_repositories: [] - secure_apt_update_cache: true diff --git a/roles/secure_apt/tasks/main.yml b/roles/secure_apt/tasks/main.yml index 6a42224..9ebcef0 100644 --- a/roles/secure_apt/tasks/main.yml +++ b/roles/secure_apt/tasks/main.yml @@ -1,10 +1,9 @@ --- - - name: Create custom keyring directory ansible.builtin.file: path: "{{ _secure_apt_keyring_folder }}" state: directory - mode: 0755 + mode: "0755" - name: Add APT key ansible.builtin.apt_key: @@ -18,12 +17,12 @@ - name: Set restrictive permissions for key file ansible.builtin.file: path: "{{ _secure_apt_keyring_folder }}/{{ secure_apt_key }}.gpg" - mode: 0444 + mode: "0444" # Causes a fatal error in check mode ignore_errors: "{{ ansible_check_mode }}" - name: Add APT repository ansible.builtin.apt_repository: - repo: "deb [arch=amd64 signed-by={{ _secure_apt_keyring_folder }}/{{ secure_apt_key }}.gpg] {{ item }}" + repo: deb [arch=amd64 signed-by={{ _secure_apt_keyring_folder }}/{{ secure_apt_key }}.gpg] {{ item }} 
update_cache: "{{ secure_apt_update_cache }}" loop: "{{ secure_apt_repositories }}" diff --git a/roles/state_exporter/.ansible-lint b/roles/state_exporter/.ansible-lint new file mode 100644 index 0000000..c175a9a --- /dev/null +++ b/roles/state_exporter/.ansible-lint @@ -0,0 +1,3 @@ +--- +skip_list: + - name[casing] diff --git a/roles/state_exporter/defaults/main.yml b/roles/state_exporter/defaults/main.yml index ba4a080..c44ae51 100644 --- a/roles/state_exporter/defaults/main.yml +++ b/roles/state_exporter/defaults/main.yml @@ -1,6 +1,6 @@ --- -state_exporter_name: "state-exporter" -state_exporter_user: "parity" -state_exporter_file: "/home/{{ state_exporter_user }}/bin/{{ state_exporter_name }}.py" +state_exporter_name: state-exporter +state_exporter_user: parity +state_exporter_file: /home/{{ state_exporter_user }}/bin/{{ state_exporter_name }}.py state_exporter_debug: false diff --git a/roles/state_exporter/handlers/main.yml b/roles/state_exporter/handlers/main.yml index 199da3e..5af760a 100644 --- a/roles/state_exporter/handlers/main.yml +++ b/roles/state_exporter/handlers/main.yml @@ -1,5 +1,4 @@ --- - - name: restart state-exporter ansible.builtin.systemd: name: "{{ state_exporter_name }}" diff --git a/roles/state_exporter/tasks/main.yml b/roles/state_exporter/tasks/main.yml index 968a8a1..476e994 100644 --- a/roles/state_exporter/tasks/main.yml +++ b/roles/state_exporter/tasks/main.yml @@ -1,53 +1,51 @@ --- - -- block: - - - name: Exporter | Install apt packages +- name: state_exporter + tags: [state-exporter] + block: + - name: state_exporter | Install apt packages ansible.builtin.package: name: "{{ packages }}" state: present update_cache: true vars: packages: - - "python3-prometheus-client" - - "python3-schedule" - - "python3-psutil" + - python3-prometheus-client + - python3-schedule + - python3-psutil - - name: Exporter | Create directory + - name: state_exporter | Create directory ansible.builtin.file: path: "{{ state_exporter_file | dirname }}" state: 
directory - mode: 0755 + mode: "0755" owner: "{{ state_exporter_user }}" group: "{{ state_exporter_user }}" - - name: Exporter | Copy exporter + - name: state_exporter | Copy exporter ansible.builtin.copy: - src: "exporter.py" + src: exporter.py dest: "{{ state_exporter_file }}" - mode: 0755 + mode: "0755" owner: "{{ state_exporter_user }}" group: "{{ state_exporter_user }}" notify: restart state-exporter - - name: Exporter | Copy exporter systemd unit file + - name: state_exporter | Copy exporter systemd unit file ansible.builtin.template: - src: ".service.j2" - dest: "/etc/systemd/system/{{ state_exporter_name }}.service" - owner: "root" - group: "root" + src: .service.j2 + dest: /etc/systemd/system/{{ state_exporter_name }}.service + owner: root + group: root mode: "0600" notify: restart state-exporter - # to avoid 2 restarts during the first deploy - - name: Exporter | Flush handlers + # to avoid 2 restarts during the first deploy + - name: state_exporter | Flush handlers ansible.builtin.meta: flush_handlers - - name: Exporter | Start exporter service + - name: state_exporter | Start exporter service ansible.builtin.systemd: name: "{{ state_exporter_name }}" state: started enabled: true daemon_reload: true - - tags: ['state-exporter'] diff --git a/roles/ws_health_exporter/.ansible-lint b/roles/ws_health_exporter/.ansible-lint new file mode 100644 index 0000000..c175a9a --- /dev/null +++ b/roles/ws_health_exporter/.ansible-lint @@ -0,0 +1,3 @@ +--- +skip_list: + - name[casing] diff --git a/roles/ws_health_exporter/defaults/main.yml b/roles/ws_health_exporter/defaults/main.yml index 63a4b23..ee432ca 100644 --- a/roles/ws_health_exporter/defaults/main.yml +++ b/roles/ws_health_exporter/defaults/main.yml @@ -1,19 +1,19 @@ --- -ws_health_exporter_url: "https://raw.githubusercontent.com/paritytech/scripts/0a2e6e9ba37799de60949acc5d7b4a0e875ae732/dockerfiles/ws-health-exporter/exporter.py" -ws_health_exporter_base_path: "/opt/{{ _ws_health_exporter_name }}" 
+ws_health_exporter_url: https://raw.githubusercontent.com/paritytech/scripts/0a2e6e9ba37799de60949acc5d7b4a0e875ae732/dockerfiles/ws-health-exporter/exporter.py +ws_health_exporter_base_path: /opt/{{ _ws_health_exporter_name }} # user has to be created by the role user -ws_health_exporter_user: "polkadot" +ws_health_exporter_user: polkadot # you can find more details here # https://github.com/paritytech/scripts/blob/master/dockerfiles/ws-health-exporter/README.md ws_health_exporter_host: "0.0.0.0" ws_health_exporter_port: 8001 -ws_health_exporter_log_level: "INFO" +ws_health_exporter_log_level: INFO ws_health_exporter_ws_check_interval: 10 ws_health_exporter_ws_timeout: 60 ws_health_exporter_node_max_unsynchronized_block_drift: 0 ws_health_exporter_node_min_peers: 10 ws_health_exporter_ws_urls: - - "ws://127.0.0.1:9944" + - ws://127.0.0.1:9944 diff --git a/roles/ws_health_exporter/handlers/main.yml b/roles/ws_health_exporter/handlers/main.yml index 187469f..15e39bb 100644 --- a/roles/ws_health_exporter/handlers/main.yml +++ b/roles/ws_health_exporter/handlers/main.yml @@ -1,8 +1,7 @@ --- - - name: restart ws-health-exporter ansible.builtin.systemd: name: "{{ _ws_health_exporter_name }}" state: restarted - enabled: yes - daemon_reload: yes + enabled: true + daemon_reload: true diff --git a/roles/ws_health_exporter/molecule/default/converge.yml b/roles/ws_health_exporter/molecule/default/converge.yml index ab56182..8d0c50a 100644 --- a/roles/ws_health_exporter/molecule/default/converge.yml +++ b/roles/ws_health_exporter/molecule/default/converge.yml @@ -3,6 +3,6 @@ hosts: all gather_facts: false tasks: - - name: "Include ws_health_exporter" + - name: Include ws_health_exporter ansible.builtin.include_role: - name: "ws_health_exporter" + name: ws_health_exporter diff --git a/roles/ws_health_exporter/molecule/default/group_vars/all.yml b/roles/ws_health_exporter/molecule/default/group_vars/all.yml index 41c7f32..d2ea45c 100644 --- 
a/roles/ws_health_exporter/molecule/default/group_vars/all.yml +++ b/roles/ws_health_exporter/molecule/default/group_vars/all.yml @@ -1,22 +1,23 @@ +--- ## Molecule ansible_user: root ## node -node_binary_version: "v0.9.42" -node_user: "polkadot" -node_binary: "https://github.com/paritytech/polkadot/releases/download/{{ node_binary_version }}/polkadot" +node_binary_version: v0.9.42 +node_user: polkadot +node_binary: https://github.com/paritytech/polkadot/releases/download/{{ node_binary_version }}/polkadot node_chain: rococo-local -node_chain_backup_restoring_type: "none" -node_parachain_chain_backup_restoring_type: "none" -node_ansible_annotation_path: "/tmp/substrate.prom" +node_chain_backup_restoring_type: none +node_parachain_chain_backup_restoring_type: none +node_ansible_annotation_path: /tmp/substrate.prom ## ws_health_exporter ws_health_exporter_user: "{{ node_user }}" -ws_health_exporter_log_level: "DEBUG" +ws_health_exporter_log_level: DEBUG ws_health_exporter_port: 8001 ws_health_exporter_node_max_unsynchronized_block_drift: 2 ws_health_exporter_node_min_peers: 1 ws_health_exporter_ws_urls: - - "ws://127.0.0.1:9944" - - "ws://127.0.0.1:9945" + - ws://127.0.0.1:9944 + - ws://127.0.0.1:9945 diff --git a/roles/ws_health_exporter/molecule/default/molecule.yml b/roles/ws_health_exporter/molecule/default/molecule.yml index 5f36ff9..adc3f6c 100644 --- a/roles/ws_health_exporter/molecule/default/molecule.yml +++ b/roles/ws_health_exporter/molecule/default/molecule.yml @@ -9,7 +9,7 @@ platforms: source: alias: debian/bullseye/amd64 # DOCKER - image: "paritytech/debian11:latest" + image: paritytech/debian11:latest command: ${MOLECULE_DOCKER_COMMAND:-""} # need this for systemctl to work in Docker privileged: true @@ -19,11 +19,11 @@ platforms: provisioner: name: ansible options: - D: True + D: true config_options: defaults: callbacks_enabled: timer verifier: name: ansible options: - D: True + D: true diff --git 
a/roles/ws_health_exporter/molecule/default/prepare.yml b/roles/ws_health_exporter/molecule/default/prepare.yml index 80a18fb..f879773 100644 --- a/roles/ws_health_exporter/molecule/default/prepare.yml +++ b/roles/ws_health_exporter/molecule/default/prepare.yml @@ -1,3 +1,4 @@ +--- - name: Prepare hosts: all gather_facts: false @@ -6,27 +7,27 @@ ansible.builtin.raw: apt -y update && apt install -y python3 changed_when: false tasks: - - name: "Include node alice" + - name: Include node alice # use include role to skip ansible-lint ansible.builtin.include_role: name: node vars: node_app_name: rococo-alice - node_data_root_path: "/opt/{{ node_app_name }}" + node_data_root_path: /opt/{{ node_app_name }} node_custom_options: - - "--alice" + - --alice node_p2p_port: "30333" node_prometheus_port: "9615" node_rpc_port: "9933" node_rpc_ws_port: "9944" - - name: "Include node bob" + - name: Include node bob ansible.builtin.include_role: name: node vars: node_app_name: rococo-bob - node_data_root_path: "/opt/{{ node_app_name }}" + node_data_root_path: /opt/{{ node_app_name }} node_custom_options: - - "--bob" + - --bob node_p2p_port: "30334" node_prometheus_port: "9616" node_rpc_port: "9934" diff --git a/roles/ws_health_exporter/molecule/default/verify.yml b/roles/ws_health_exporter/molecule/default/verify.yml index d6ace21..d01c460 100644 --- a/roles/ws_health_exporter/molecule/default/verify.yml +++ b/roles/ws_health_exporter/molecule/default/verify.yml @@ -3,32 +3,32 @@ hosts: all gather_facts: true tasks: - - name: Collect service facts - ansible.builtin.service_facts: + - name: Collect service facts + ansible.builtin.service_facts: - - name: print service facts - ansible.builtin.debug: - var: ansible_facts.services[item+'.service'] - loop: - - "rococo-alice" - - "rococo-bob" + - name: print service facts + ansible.builtin.debug: + var: ansible_facts.services[item+'.service'] + loop: + - rococo-alice + - rococo-bob - - name: check service - ansible.builtin.assert: - that: 
ansible_facts.services[item+'.service'].state == 'running' - loop: - - "rococo-alice" - - "rococo-bob" + - name: check service + ansible.builtin.assert: + that: ansible_facts.services[item+'.service'].state == 'running' + loop: + - rococo-alice + - rococo-bob - - name: check ws health exporter - ansible.builtin.uri: - url: "http://127.0.0.1:{{ ws_health_exporter_port }}/health/readiness" - use_proxy: false - register: _ws_health_exporter - until: _ws_health_exporter.status == 200 - retries: 10 # 10 * 5 seconds = 50 sec - delay: 5 + - name: check ws health exporter + ansible.builtin.uri: + url: http://127.0.0.1:{{ ws_health_exporter_port }}/health/readiness + use_proxy: false + register: _ws_health_exporter + until: _ws_health_exporter.status == 200 + retries: 10 # 10 * 5 seconds = 50 sec + delay: 5 - - name: Print service facts - ansible.builtin.debug: - var: _ws_health_exporter + - name: Print service facts + ansible.builtin.debug: + var: _ws_health_exporter diff --git a/roles/ws_health_exporter/tasks/main.yml b/roles/ws_health_exporter/tasks/main.yml index 11895c4..4b3f91e 100644 --- a/roles/ws_health_exporter/tasks/main.yml +++ b/roles/ws_health_exporter/tasks/main.yml @@ -1,73 +1,71 @@ --- +- name: ws_health_exporter + tags: [ws_health_exporter, ws-health-exporter] + block: + - name: ws_health_exporter | install apt packages + ansible.builtin.package: + name: "{{ packages }}" + state: present + update_cache: true + vars: + packages: + - python3-venv + - python3-setuptools -- block: + - name: ws_health_exporter | create base directory + ansible.builtin.file: + path: "{{ ws_health_exporter_base_path }}" + state: directory + mode: "0755" + owner: root + group: root - - name: ws_health_exporter | install apt packages - ansible.builtin.package: - name: "{{ packages }}" - state: present - update_cache: yes - vars: - packages: - - "python3-venv" - - "python3-setuptools" + - name: ws_health_exporter | download exporter file + ansible.builtin.get_url: + url: "{{ 
ws_health_exporter_url }}" + dest: "{{ _ws_health_exporter_file }}" + mode: "0755" + owner: root + group: root + timeout: 30 + notify: restart ws-health-exporter - - name: ws_health_exporter | create base directory - ansible.builtin.file: - path: "{{ ws_health_exporter_base_path }}" - state: directory - mode: 0755 - owner: "root" - group: "root" + - name: ws_health_exporter | install Python modules + ansible.builtin.pip: + name: + - prometheus-client==0.16.0 + - websocket-client==1.5.1 + - apscheduler==3.10.1 + - flask==3.0.0 + - environs==9.5.0 + - waitress==2.1.2 + virtualenv: "{{ _ws_health_exporter_venv }}" + virtualenv_command: python3 -m venv - - name: ws_health_exporter | download exporter file - ansible.builtin.get_url: - url: "{{ ws_health_exporter_url }}" - dest: "{{ _ws_health_exporter_file }}" - mode: 0755 - owner: "root" - group: "root" - timeout: 30 - notify: restart ws-health-exporter + - name: ws_health_exporter | set root as owner of the venv directory + ansible.builtin.file: + path: "{{ _ws_health_exporter_venv }}" + state: directory + recurse: true + owner: root + group: root - - name: ws_health_exporter | install Python modules - ansible.builtin.pip: - name: - - "prometheus-client==0.16.0" - - "websocket-client==1.5.1" - - "apscheduler==3.10.1" - - "flask==3.0.0" - - "environs==9.5.0" - - "waitress==2.1.2" - virtualenv: "{{ _ws_health_exporter_venv }}" - virtualenv_command: "python3 -m venv" - - - name: ws_health_exporter | set root as owner of the venv directory - ansible.builtin.file: - path: "{{ _ws_health_exporter_venv }}" - state: directory - recurse: true - owner: "root" - group: "root" - - - name: ws_health_exporter | copy exporter systemd unit file - ansible.builtin.template: - src: ".service.j2" - dest: "/etc/systemd/system/{{ _ws_health_exporter_name }}.service" - owner: "root" - group: "root" - mode: "0600" - notify: restart ws-health-exporter + - name: ws_health_exporter | copy exporter systemd unit file + ansible.builtin.template: + 
src: .service.j2 + dest: /etc/systemd/system/{{ _ws_health_exporter_name }}.service + owner: root + group: root + mode: "0600" + notify: restart ws-health-exporter # to avoid 2 restarts during the first deploy - - name: ws_health_exporter | Flush handlers - ansible.builtin.meta: flush_handlers - - - name: ws_health_exporter | start exporter service - ansible.builtin.systemd: - name: "{{ _ws_health_exporter_name }}" - state: started - enabled: yes - daemon_reload: yes + - name: ws_health_exporter | Flush handlers + ansible.builtin.meta: flush_handlers - tags: ['ws_health_exporter', 'ws-health-exporter'] + - name: ws_health_exporter | start exporter service + ansible.builtin.systemd: + name: "{{ _ws_health_exporter_name }}" + state: started + enabled: true + daemon_reload: true diff --git a/roles/ws_health_exporter/vars/main.yml b/roles/ws_health_exporter/vars/main.yml index 4afb0a4..727f201 100644 --- a/roles/ws_health_exporter/vars/main.yml +++ b/roles/ws_health_exporter/vars/main.yml @@ -1,5 +1,5 @@ --- -_ws_health_exporter_name: "ws-health-exporter" +_ws_health_exporter_name: ws-health-exporter _ws_health_exporter_file: "{{ ws_health_exporter_base_path }}/exporter.py" _ws_health_exporter_venv: "{{ ws_health_exporter_base_path }}/venv"