diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 10669eeba..4b109e985 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -42,7 +42,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v1 + uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -53,7 +53,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v1 + uses: github/codeql-action/autobuild@v2 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -67,4 +67,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml index 6027c87aa..9eb0b10d4 100644 --- a/.github/workflows/test-install.yml +++ b/.github/workflows/test-install.yml @@ -36,7 +36,8 @@ jobs: steps: - uses: actions/checkout@v3 - name: do test install in case of merged pull request - run: cd /home/runner/work/firewall-orchestrator/firewall-orchestrator && ansible-playbook -e run_on_github=yes --skip-tags test site.yml -K + run: cd /home/runner/work/firewall-orchestrator/firewall-orchestrator && ansible-playbook -e run_on_github=yes site.yml -K +# run: cd /home/runner/work/firewall-orchestrator/firewall-orchestrator && ansible-playbook -e run_on_github=yes --skip-tags test site.yml -K # test_ubuntu_22: # name: test build on ubuntu_22 @@ -44,4 +45,5 @@ jobs: # steps: # - uses: actions/checkout@v3 # - name: do test install in case of merged pull request - # run: cd /home/runner/work/firewall-orchestrator/firewall-orchestrator && ansible-playbook -e run_on_github=yes --skip-tags test site.yml -K + # run: cd /home/runner/work/firewall-orchestrator/firewall-orchestrator && ansible-playbook -e run_on_github=yes site.yml -K + # run: cd /home/runner/work/firewall-orchestrator/firewall-orchestrator && ansible-playbook -e run_on_github=yes --skip-tags test site.yml -K diff --git a/README.md b/README.md index c09e06a76..3b298e7d1 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,11 @@ - Regularly re-certify firewall rules to clean up your rulebase - Use the built-in GraphQL API to integrate with your existing infrastructure (Directory Service, ITSM, IPAM, ...) -Demo: if you want to see what it looks like in advance, visit (user: test, password: drive2). +Reporting Demo: +![fwo-demo-reporting-vsmall](https://github.com/CactuseSecurity/firewall-orchestrator/assets/19877770/f9ffe37f-b059-44cf-b056-30a8f3e008a6) + + +Further Demo: if you want to see what it looks like in advance, visit (user: test, password: drive2). 
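As a rough illustration of the built-in GraphQL API mentioned above: the API is served by Hasura, so the compliance tables tracked further down in this change set become queryable once the metadata is applied. A minimal sketch, assuming Hasura's default `<schema>_<table>` root-field naming and a role with select permissions (e.g. reporter):

```graphql
# Hypothetical query; field and relationship names follow the Hasura metadata
# added in this change set (compliance.network_zone and its ip_ranges relationship).
query NetworkZonesWithRanges {
  compliance_network_zone {
    id
    name
    description
    ip_ranges {
      ip_range_start
      ip_range_end
    }
  }
}
```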
## Installation instructions diff --git a/ansible.cfg b/ansible.cfg index ef5ce7703..4da73e6e4 100644 --- a/ansible.cfg +++ b/ansible.cfg @@ -5,8 +5,8 @@ inventory = inventory force_handlers = True stdout_callback = yaml -gathering = smart -gather_subset = !hardware,!facter,!ohai +# gathering = smart +# gather_subset = !hardware,!facter,!ohai ansible_conditional_bare_vars=false diff --git a/documentation/installer/basic-installation.md b/documentation/installer/basic-installation.md index 86503d9e9..9caefdf42 100644 --- a/documentation/installer/basic-installation.md +++ b/documentation/installer/basic-installation.md @@ -28,11 +28,15 @@ possibly followed by a reboot. git clone https://github.com/CactuseSecurity/firewall-orchestrator.git ``` -3) Operating specific ansible adjustments - - Ubuntu 18.04, Debian 10: install latest ansible before firewall orchestrator installation: +3) Ansible Installation + - Ubuntu 18.04, Debian 10 only: install latest ansible before firewall orchestrator installation cd firewall-orchestrator; ansible-playbook scripts/install-latest-ansible.yml -K + - All platforms: install galaxy collections + + ansible-galaxy collection install community.postgresql + 4) install (on localhost) ```console diff --git a/documentation/revision-history-develop.md b/documentation/revision-history-develop.md new file mode 100644 index 000000000..fb12ae013 --- /dev/null +++ b/documentation/revision-history-develop.md @@ -0,0 +1,80 @@ +# Firewall Orchestrator Revision History for DEVELOP branch only + +pre-5, a product called IT Security Organizer and was closed source. It was developed starting in 2005. +In 2020 we decided to re-launch a new + +### 6.1.0 - 16.11.2022 DEVELOP +- interactive network analysis prototype in UI +- integrate path analysis to workflow + +### 6.1.1 - 15.12.2022 DEVELOP +- recertification on owner base +- preparation of new task types + +### 6.1.2 - 20.12.2022 DEVELOP +- start of Palo Alto import module + +### 6.1.3 - xx.01.2023 DEVELOP +- enhance recertification + +### 6.1.4 - 27.01.2023 DEVELOP +- prepare delete rule requests + +### 6.2.2 22.03.2023 DEVELOP +- adding last hit of each rule for check point and FortiManager to recertification (report) + +### 6.3.3 09.05.2023 DEVELOP +- new importer module for importing FortiGate directly via FortiOS REST API + +### 6.4.4 19.06.2023 DEVELOP +- CPR8x importer: basic support for inline layers + +### 6.4.5 22.06.2023 DEVELOP +- Fortigate API importer: hotfix NAT rules +- upgrade to hasura API 2.28.0 + +### 6.4.6 23.06.2023 DEVELOP +- new email notification on import changes + +### 6.4.7 26.06.2023 DEVELOP +- hotfix fortiOS importer NAT IP addresses +- fixing issue during ubuntu OS upgrade with ldap +- unifying all buttons in UI + +### 6.4.8 29.06.2023 DEVELOP +- hotfix fortiOS importer: replacing ambiguous import statement + +### 6.4.9 03.07.2023 DEVELOP +- fix sample group role path + +### 6.4.10 07.07.2023 DEVELOP +- fixes in importer change mail notification for encrypted mails +- fixes for report links to objects +- fix template name display issue +- fix UI visibility for fw-admin role (multiple pages) +- UI login page: allow enter as submit +- UI reporting: filter objects in rule report +- adding demo video in github README.MD + +### 6.4.11 10.07.2023 DEVELOP +- bugfix in importer change mail notification for missing mail server config + +### 6.4.12 14.07.2023 DEVELOP +- UI settings: hotfix email port (default 25) was not written to config before +- splitting revision history into develop and main +- 
installer: suppress csharp test results on success + +### 6.4.13 20.07.2023 DEVELOP +- re-login now also with enter key +- fixing help pages (email & importer settings, archive, scheduling) [#2162](https://github.com/CactuseSecurity/firewall-orchestrator/issues/2162) + +### 6.5.0 24.07.2023 DEVELOP +- UI: adding compliance matrix module +- UI: fix browser session persistence causing subscriptions to remain open after user logout; now api connection and web socket are disposed on logout +- API: removing obsolete graphql query repos +- API: upgrading hasura api to 2.30.0 +- installer: replacing deprecated path_to_script option in postgresql_query + +### 6.5.1 24.07.2023 DEVELOP +- New report type Unused Rules + diff --git a/documentation/revision-history.md b/documentation/revision-history-main.md similarity index 90% rename from documentation/revision-history.md rename to documentation/revision-history-main.md index df4adf9e4..b16e04c6e 100644 --- a/documentation/revision-history.md +++ b/documentation/revision-history-main.md @@ -1,4 +1,4 @@ -# Firewall Orchestrator Revision History +# Firewall Orchestrator Revision History MAIN branch pre-5, a product called IT Security Organizer and was closed source. It was developed starting in 2005. In 2020 we decided to re-launch a new @@ -253,23 +253,6 @@ adding report template format fk and permissions ### 6.0.2 - 24.12.2022 - bugfix release with hasura API upgrade due to security bug in hasura -### 6.1.0 - 16.11.2022 DEVELOP -- interactive network analysis prototype in UI -- integrate path analysis to workflow - -### 6.1.1 - 15.12.2022 DEVELOP -- recertification on owner base -- preparation of new task types - -### 6.1.2 - 20.12.2022 DEVELOP -- start of Palo Alto import module - -### 6.1.3 - xx.01.2023 DEVELOP -- enhance recertification - -### 6.1.4 - 27.01.2023 DEVELOP -- prepare delete rule requests - ### 6.2 - 16.03.2023 MAIN - enhanced recertification module: adding ip-base recertification - adding import modules for Palo Alto and Azure Firewall @@ -280,9 +263,6 @@ adding report template format fk and permissions - reduced logging in release mode - hasura v2.21.0 upgrade -### 6.2.2 22.03.2023 DEVELOP -- adding last hit of each rule for check point and FortiManager to recertification (report) - ### 6.3 24.04.2023 MAIN - adding CP R8X object types - application categories @@ -297,9 +277,6 @@ adding report template format fk and permissions - checkpoint R8X importer adding support for Internet object type - reporting - CSV export for change report -### 6.3.3 09.05.2023 DEVELOP -- new importer module for importing FortiGate directly via FortiOS REST API - ### 6.4 25.05.2023 MAIN - New importer module for importing FortiGate directly via FortiOS REST API - Reporting: new lean export format JSON for resolved and tech reports @@ -315,5 +292,22 @@ adding report template format fk and permissions ### 6.4.3 05.06.2023 MAIN - Hotfix - global config subsription timout after 12h -### 6.4.4 xx.06.2023 DEVELOP -- CPR8x importer: basic support for inline layers +### 7.0 26.07.2023 MAIN +- new features + - UI adding compliance matrix module + - UI Reporting - unused rules report including delete ticket integration + - importer new email notification on security relevant import changes + - importer CPR8x: basic support for importing inline layers + +- maintenance / bug-fixing + - API: upgrading hasura api to 2.30.1 + - importer Fortigate API: hotfix NAT rules + - UI: cleanup around buttons and logout session handling + - UI Reporting: fixes links to objects,
template name display, UI visibility for fw-admin role (multiple pages) + - UI (re-)login: allow enter as submit + - UI reporting: filter objects properly in rule report + - UI updating help pages (email & importer settings, archive, scheduling) + - installer: suppress csharp test results on success + - demo data: fix sample group role path + - adding demo video in github README.MD + - splitting revision history into develop and main diff --git a/inventory/group_vars/all.yml b/inventory/group_vars/all.yml index f5856a8e2..8e349757b 100644 --- a/inventory/group_vars/all.yml +++ b/inventory/group_vars/all.yml @@ -1,5 +1,5 @@ ### general settings -product_version: "6.4.3" +product_version: "7.0" ansible_user: "{{ lookup('env', 'USER') }}" ansible_become_method: sudo ansible_python_interpreter: /usr/bin/python3 @@ -40,6 +40,7 @@ fworch_secrets_dir: "{{ fworch_conf_dir }}/secrets" # setting default proxy (may be overwritten via --extra-vars) http_proxy: "{{ lookup('env','http_proxy') }}" https_proxy: "{{ lookup('env','https_proxy') }}" +no_proxy: "{{ lookup('env','no_proxy') }}" proxy_exceptions: "{{ lookup('env','no_proxy') }}" proxy_env: http_proxy: "{{ http_proxy }}" @@ -53,8 +54,9 @@ http_proxy_import_parameter: "" # use the following syntax for authenticated proxy access: # http_proxy=http://USERNAME:PASSWORD@1.2.3.4:8080/ - -debian_testing_version: "11" +# OS +debian_testing_version: "12" +debian_testing_release_name: trixie arch: x86_64 redhat_major_version: "8" redhat_arch: "{{ redhat_major_version }}-{{ arch }}" diff --git a/inventory/group_vars/apiserver.yml b/inventory/group_vars/apiserver.yml index 36b473d53..1dae9d01b 100644 --- a/inventory/group_vars/apiserver.yml +++ b/inventory/group_vars/apiserver.yml @@ -8,7 +8,7 @@ api_hasura_admin_test_password: "not4production" api_user_email: "{{ api_user }}@{{ api_network_listening_ip_address }}" api_home: "{{ fworch_home }}/api" api_hasura_cli_bin: "{{ fworch_home }}/api/bin/hasura" -api_hasura_version: "v2.26.0" +api_hasura_version: "v2.30.1" api_project_name: api api_no_metadata: false api_rollback_is_running: false diff --git a/inventory/group_vars/cloud.yml b/inventory/group_vars/cloud.yml new file mode 100644 index 000000000..745bbc536 --- /dev/null +++ b/inventory/group_vars/cloud.yml @@ -0,0 +1,12 @@ +################## cloud ########################### + +cloud_vm_name: fworch-vm1 +cloud_admin_name: cadmin +# cloud_admin_ssh_public_key: "" +cloud_network: "10.5.0.0/16" +cloud_subnet: "10.5.1.0/24" +cloud_location: northcentral +cloud_image_publisher: canonical +cloud_image_sku: "20_04-lts" +cloud_vm_size: "Standard_B2s" +cloud_resource_group: "fworch_rg" diff --git a/inventory/group_vars/databaseserver.yml b/inventory/group_vars/databaseserver.yml index c0e91e77d..3f57662ae 100644 --- a/inventory/group_vars/databaseserver.yml +++ b/inventory/group_vars/databaseserver.yml @@ -3,7 +3,7 @@ postgresql_package: postgresql postgresql_test_package: pgtap postgresql_c_client_library_header_files: libpq-dev postgresql_dev_package_prefix: postgresql-server-dev -postgresql_query_as_single_query: no +postgresql_query_as_single_query: false database_install_dir: "{{ fworch_home }}/database" # table_space variable can be used to create database in another place where there is enough space diff --git a/roles/api/files/replace_metadata.json b/roles/api/files/replace_metadata.json index 72d98431e..791ca896a 100644 --- a/roles/api/files/replace_metadata.json +++ b/roles/api/files/replace_metadata.json @@ -9,6 +9,839 @@ "name": "default", "kind":
"postgres", "tables": [ + { + "table": { + "name": "ip_range", + "schema": "compliance" + }, + "object_relationships": [ + { + "name": "network_zone", + "using": { + "foreign_key_constraint_on": "network_zone_id" + } + } + ], + "insert_permissions": [ + { + "role": "fw-admin", + "permission": { + "check": {}, + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ] + } + }, + { + "role": "importer", + "permission": { + "check": {}, + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ] + } + }, + { + "role": "middleware-server", + "permission": { + "check": {}, + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ] + } + } + ], + "select_permissions": [ + { + "role": "approver", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "auditor", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "fw-admin", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "implementer", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "importer", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "middleware-server", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "planner", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "recertifier", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reporter", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reporter-viewall", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "requester", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reviewer", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "allow_aggregations": true + } + } + ], + "update_permissions": [ + { + "role": "fw-admin", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "check": {} + } + }, + { + "role": "importer", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "check": {} + } + }, + { + "role": "middleware-server", + "permission": { + "columns": [ + "network_zone_id", + "ip_range_end", + "ip_range_start" + ], + "filter": {}, + "check": {} + } + } + ], + "delete_permissions": [ + { + "role": "fw-admin", + "permission": { + "filter": {} + } + }, + { + "role": "importer", + "permission": { + "filter": {} + } + }, + { + 
"role": "middleware-server", + "permission": { + "filter": {} + } + } + ] + }, + { + "table": { + "name": "network_zone", + "schema": "compliance" + }, + "object_relationships": [ + { + "name": "super_network_zone", + "using": { + "foreign_key_constraint_on": "super_network_zone_id" + } + } + ], + "array_relationships": [ + { + "name": "ip_ranges", + "using": { + "foreign_key_constraint_on": { + "column": "network_zone_id", + "table": { + "name": "ip_range", + "schema": "compliance" + } + } + } + }, + { + "name": "network_zone_communication_destinations", + "using": { + "foreign_key_constraint_on": { + "column": "from_network_zone_id", + "table": { + "name": "network_zone_communication", + "schema": "compliance" + } + } + } + }, + { + "name": "network_zone_communication_sources", + "using": { + "foreign_key_constraint_on": { + "column": "to_network_zone_id", + "table": { + "name": "network_zone_communication", + "schema": "compliance" + } + } + } + }, + { + "name": "sub_network_zones", + "using": { + "foreign_key_constraint_on": { + "column": "super_network_zone_id", + "table": { + "name": "network_zone", + "schema": "compliance" + } + } + } + } + ], + "insert_permissions": [ + { + "role": "fw-admin", + "permission": { + "check": {}, + "columns": [ + "id", + "owner_id", + "super_network_zone_id", + "description", + "name" + ] + } + }, + { + "role": "importer", + "permission": { + "check": {}, + "columns": [ + "id", + "owner_id", + "super_network_zone_id", + "description", + "name" + ] + } + }, + { + "role": "middleware-server", + "permission": { + "check": {}, + "columns": [ + "id", + "owner_id", + "super_network_zone_id", + "description", + "name" + ] + } + } + ], + "select_permissions": [ + { + "role": "approver", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "auditor", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "fw-admin", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "implementer", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "importer", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "middleware-server", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "planner", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "recertifier", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reporter", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reporter-viewall", + "permission": { + "columns": [ + "id", + "name", 
+ "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "requester", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reviewer", + "permission": { + "columns": [ + "id", + "name", + "description", + "super_network_zone_id", + "owner_id" + ], + "filter": {}, + "allow_aggregations": true + } + } + ], + "update_permissions": [ + { + "role": "fw-admin", + "permission": { + "columns": [ + "id", + "owner_id", + "super_network_zone_id", + "description", + "name" + ], + "filter": {}, + "check": {} + } + }, + { + "role": "importer", + "permission": { + "columns": [ + "id", + "owner_id", + "super_network_zone_id", + "description", + "name" + ], + "filter": {}, + "check": {} + } + }, + { + "role": "middleware-server", + "permission": { + "columns": [ + "id", + "owner_id", + "super_network_zone_id", + "description", + "name" + ], + "filter": {}, + "check": {} + } + } + ], + "delete_permissions": [ + { + "role": "fw-admin", + "permission": { + "filter": {} + } + }, + { + "role": "importer", + "permission": { + "filter": {} + } + }, + { + "role": "middleware-server", + "permission": { + "filter": {} + } + } + ] + }, + { + "table": { + "name": "network_zone_communication", + "schema": "compliance" + }, + "object_relationships": [ + { + "name": "from_network_zone", + "using": { + "foreign_key_constraint_on": "from_network_zone_id" + } + }, + { + "name": "to_network_zone", + "using": { + "foreign_key_constraint_on": "to_network_zone_id" + } + } + ], + "insert_permissions": [ + { + "role": "fw-admin", + "permission": { + "check": {}, + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ] + } + }, + { + "role": "importer", + "permission": { + "check": {}, + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ] + } + }, + { + "role": "middleware-server", + "permission": { + "check": {}, + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ] + } + } + ], + "select_permissions": [ + { + "role": "approver", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "auditor", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "fw-admin", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "implementer", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "importer", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "middleware-server", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "planner", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "recertifier", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reporter", + "permission": { + "columns": [ + 
"from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reporter-viewall", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "requester", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + }, + { + "role": "reviewer", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "allow_aggregations": true + } + } + ], + "update_permissions": [ + { + "role": "fw-admin", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "check": {} + } + }, + { + "role": "importer", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "check": {} + } + }, + { + "role": "middleware-server", + "permission": { + "columns": [ + "from_network_zone_id", + "to_network_zone_id" + ], + "filter": {}, + "check": {} + } + } + ], + "delete_permissions": [ + { + "role": "fw-admin", + "permission": { + "filter": {} + } + }, + { + "role": "importer", + "permission": { + "filter": {} + } + }, + { + "role": "middleware-server", + "permission": { + "filter": {} + } + } + ] + }, { "table": { "name": "alert", @@ -19007,4 +19840,4 @@ ] } } -} +} \ No newline at end of file diff --git a/roles/api/tasks/grants-ansible-2.10.yml b/roles/api/tasks/grants-ansible-2.10.yml deleted file mode 100644 index 595b2879d..000000000 --- a/roles/api/tasks/grants-ansible-2.10.yml +++ /dev/null @@ -1,11 +0,0 @@ - -- name: set grants hasura schema from ansible 2.10 - community.postgresql.postgresql_query: - db: "{{ fworch_db_name }}" - query: "GRANT USAGE ON SCHEMA {{ item }} TO dbbackupusers; Grant select on ALL TABLES in SCHEMA {{ item }} to group dbbackupusers; ALTER DEFAULT PRIVILEGES IN SCHEMA {{ item }} GRANT SELECT ON TABLES TO group dbbackupusers;" - as_single_query: "{{ postgresql_query_as_single_query }}" - become: true - become_user: postgres - loop: - - hdb_catalog - \ No newline at end of file diff --git a/roles/api/tasks/grants-ansible-pre2.10.yml b/roles/api/tasks/grants-ansible-pre2.10.yml deleted file mode 100644 index 47d7f129b..000000000 --- a/roles/api/tasks/grants-ansible-pre2.10.yml +++ /dev/null @@ -1,9 +0,0 @@ - -- name: set grants for schemas before ansible 2.10 - postgresql_query: - db: "{{ fworch_db_name }}" - query: "GRANT USAGE ON SCHEMA {{ item }} TO dbbackupusers; Grant select on ALL TABLES in SCHEMA {{ item }} to group dbbackupusers; ALTER DEFAULT PRIVILEGES IN SCHEMA {{ item }} GRANT SELECT ON TABLES TO group dbbackupusers;" - become: true - become_user: postgres - loop: - - hdb_catalog diff --git a/roles/api/tasks/hasura-install.yml b/roles/api/tasks/hasura-install.yml index 4b1d24eff..36f47d053 100644 --- a/roles/api/tasks/hasura-install.yml +++ b/roles/api/tasks/hasura-install.yml @@ -109,6 +109,8 @@ HTTPS_PROXY: "{{ https_proxy }}" http_proxy: "{{ http_proxy }}" https_proxy: "{{ https_proxy }}" + no_proxy: "{{ no_proxy }}" + NO_PROXY: "{{ no_proxy }}" - name: show hasura env for debugging debug: diff --git a/roles/api/tasks/main.yml b/roles/api/tasks/main.yml index 0c885851f..b9b332e69 100644 --- a/roles/api/tasks/main.yml +++ b/roles/api/tasks/main.yml @@ -85,10 +85,11 @@ when: installation_mode == "upgrade" become: true -- name: settings grants for hasura schema old ansible to allow dbbackupusers access to all 
database shemas - include_tasks: grants-ansible-pre2.10.yml - when: ansible_version.full is version('2.10', '<') - -- name: settings grants for hasura schema new ansible to allow dbbackupusers access to all database shemas - include_tasks: grants-ansible-2.10.yml - when: ansible_version.full is version('2.10', '>=') +- name: set grants for hasura schemas (after hasura install) + postgresql_query: + db: "{{ fworch_db_name }}" + query: "GRANT USAGE ON SCHEMA {{ item }} TO dbbackupusers; Grant select on ALL TABLES in SCHEMA {{ item }} to group dbbackupusers; ALTER DEFAULT PRIVILEGES IN SCHEMA {{ item }} GRANT SELECT ON TABLES TO group dbbackupusers;" + become: true + become_user: postgres + loop: + - hdb_catalog diff --git a/roles/api/templates/fworch-hasura-docker-api.service.j2 b/roles/api/templates/fworch-hasura-docker-api.service.j2 index bef0c2039..6c0bc9c94 100644 --- a/roles/api/templates/fworch-hasura-docker-api.service.j2 +++ b/roles/api/templates/fworch-hasura-docker-api.service.j2 @@ -9,8 +9,8 @@ After=network.target remote-fs.target nss-lookup.target WorkingDirectory={{ fworch_home }} ExecStartPre=/bin/sleep 10 ExecStart=/usr/bin/docker start {{ api_container_name }} -StandardOutput=syslog -StandardError=syslog +StandardOutput=journal +StandardError=journal SyslogIdentifier={{ product_name }}-api User={{ fworch_user }} Environment= diff --git a/roles/common/tasks/main.yml b/roles/common/tasks/main.yml index 3b4729b03..7af3c0996 100644 --- a/roles/common/tasks/main.yml +++ b/roles/common/tasks/main.yml @@ -169,7 +169,7 @@ - set_fact: wsgi_package_name: "{{ wsgi_package_name }}-py3" when: | - (ansible_facts['distribution_release']|lower == 'bookworm') + (ansible_facts['distribution_release']|lower == debian_testing_release_name) or (ansible_facts['distribution']|lower == 'debian' and ansible_facts['distribution_major_version']|int is version('10', '>')) or diff --git a/roles/database/files/sql/creation/fworch-create-constraints.sql b/roles/database/files/sql/creation/fworch-create-constraints.sql index 6c1ce07a5..6e6246b58 100755 --- a/roles/database/files/sql/creation/fworch-create-constraints.sql +++ b/roles/database/files/sql/creation/fworch-create-constraints.sql @@ -20,3 +20,12 @@ Alter Table "zone" add Constraint "Alter_Key10" UNIQUE ("mgm_id","zone_name"); create unique index if not exists only_one_future_recert_per_owner_per_rule on recertification(owner_id,rule_metadata_id,recert_date) where recert_date IS NULL; + +--- compliance +CREATE EXTENSION IF NOT EXISTS btree_gist; +ALTER TABLE compliance.ip_range ADD CONSTRAINT "exclude_overlapping_ip_ranges" +EXCLUDE USING gist ( + network_zone_id WITH =, + numrange(ip_range_start - '0.0.0.0'::inet, ip_range_end - '0.0.0.0'::inet, '[]') WITH && +); + diff --git a/roles/database/files/sql/creation/fworch-create-foreign-keys.sql b/roles/database/files/sql/creation/fworch-create-foreign-keys.sql index b18149cb1..2d2366494 100755 --- a/roles/database/files/sql/creation/fworch-create-foreign-keys.sql +++ b/roles/database/files/sql/creation/fworch-create-foreign-keys.sql @@ -266,7 +266,16 @@ ALTER TABLE request.impltask ADD CONSTRAINT request_impltask_object_foreign_key ALTER TABLE request.impltask ADD CONSTRAINT request_impltask_usergrp_foreign_key FOREIGN KEY (user_grp_id) REFERENCES usr(user_id) ON UPDATE RESTRICT ON DELETE CASCADE; ALTER TABLE request.impltask ADD CONSTRAINT request_impltask_current_handler_foreign_key FOREIGN KEY (current_handler) REFERENCES uiuser(uiuser_id) ON UPDATE RESTRICT ON DELETE CASCADE; ALTER TABLE 
request.impltask ADD CONSTRAINT request_impltask_recent_handler_foreign_key FOREIGN KEY (recent_handler) REFERENCES uiuser(uiuser_id) ON UPDATE RESTRICT ON DELETE CASCADE; - --- recertification --- ALTER TABLE recertification ADD CONSTRAINT recertification_rule_metadata_foreign_key FOREIGN KEY (rule_metadata_id) REFERENCES rule_metadata(rule_metadata_id) ON UPDATE RESTRICT ON DELETE CASCADE; ALTER TABLE recertification ADD CONSTRAINT recertification_owner_foreign_key FOREIGN KEY (owner_id) REFERENCES owner(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +--- compliance.ip_range --- +ALTER TABLE compliance.ip_range ADD CONSTRAINT compliance_ip_range_network_zone_foreign_key FOREIGN KEY (network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +--- compliance.network_zone --- +ALTER TABLE compliance.network_zone ADD CONSTRAINT compliance_super_zone_foreign_key FOREIGN KEY (super_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +--- compliance.network_zone_communication --- +ALTER TABLE compliance.network_zone_communication ADD CONSTRAINT compliance_from_network_zone_communication_foreign_key FOREIGN KEY (from_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; +ALTER TABLE compliance.network_zone_communication ADD CONSTRAINT compliance_to_network_zone_communication_foreign_key FOREIGN KEY (to_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/roles/database/files/sql/creation/fworch-create-indices.sql b/roles/database/files/sql/creation/fworch-create-indices.sql index 8f12dd62b..96917ff51 100755 --- a/roles/database/files/sql/creation/fworch-create-indices.sql +++ b/roles/database/files/sql/creation/fworch-create-indices.sql @@ -116,3 +116,8 @@ Create index "IX_Relationship179" on "zone" ("zone_last_seen"); create unique index if not exists only_one_default_owner on owner(is_default) where is_default = true; + +-- compliance +Create index IF NOT EXISTS idx_fkey_network_zone_id on compliance.ip_range USING HASH (network_zone_id); +Create index IF NOT EXISTS idx_fkey_network_zone_from on compliance.network_zone_communication USING HASH (from_network_zone_id); +Create index IF NOT EXISTS idx_fkey_network_zone_to on compliance.network_zone_communication USING HASH (to_network_zone_id); diff --git a/roles/database/files/sql/creation/fworch-create-tables.sql b/roles/database/files/sql/creation/fworch-create-tables.sql index ad7ba09b8..b0c05ac96 100755 --- a/roles/database/files/sql/creation/fworch-create-tables.sql +++ b/roles/database/files/sql/creation/fworch-create-tables.sql @@ -1,6 +1,6 @@ /* Created 29.04.2005 -Last modified 13.12.2020 +Last modified 14.07.2023 Project Firewall Orchestrator Contact https://cactus.de/fworch Database PostgreSQL 9-13 @@ -1249,3 +1249,31 @@ create table request.impltask target_begin_date Timestamp, target_end_date Timestamp ); + + +--- Compliance --- +create schema compliance; + +create table compliance.network_zone +( + id BIGSERIAL PRIMARY KEY, + name VARCHAR NOT NULL, + description VARCHAR NOT NULL, + super_network_zone_id bigint, + owner_id bigint +); + +create table compliance.network_zone_communication +( + from_network_zone_id bigint NOT NULL, + to_network_zone_id bigint NOT NULL +); + +create table compliance.ip_range +( + network_zone_id bigint NOT NULL, + ip_range_start inet NOT NULL, + ip_range_end inet NOT NULL, + PRIMARY KEY(network_zone_id, ip_range_start, ip_range_end) 
+); + diff --git a/roles/database/files/sql/creation/fworch-fill-stm.sql b/roles/database/files/sql/creation/fworch-fill-stm.sql index 27560273c..e8447690b 100644 --- a/roles/database/files/sql/creation/fworch-fill-stm.sql +++ b/roles/database/files/sql/creation/fworch-fill-stm.sql @@ -68,6 +68,8 @@ insert into config (config_key, config_value, config_user) VALUES ('reqAutoCreat insert into config (config_key, config_value, config_user) VALUES ('reqAllowObjectSearch', 'False', 0); insert into config (config_key, config_value, config_user) VALUES ('reqAllowManualOwnerAdmin', 'False', 0); insert into config (config_key, config_value, config_user) VALUES ('reqActivatePathAnalysis', 'True', 0); +insert into config (config_key, config_value, config_user) VALUES ('unusedTolerance', '400', 0); +insert into config (config_key, config_value, config_user) VALUES ('creationTolerance', '90', 0); INSERT INTO "report_format" ("report_format_name") VALUES ('json'); INSERT INTO "report_format" ("report_format_name") VALUES ('pdf'); diff --git a/roles/database/files/sql/idempotent/fworch-grants.sql b/roles/database/files/sql/idempotent/fworch-grants.sql index b62db7b3b..bb003757e 100644 --- a/roles/database/files/sql/idempotent/fworch-grants.sql +++ b/roles/database/files/sql/idempotent/fworch-grants.sql @@ -12,6 +12,12 @@ Grant select on ALL TABLES in SCHEMA request to group dbbackupusers; ALTER DEFAULT PRIVILEGES IN SCHEMA request GRANT SELECT ON SEQUENCES TO group "dbbackupusers"; ALTER DEFAULT PRIVILEGES IN SCHEMA request GRANT SELECT ON TABLES TO group dbbackupusers; +GRANT USAGE ON SCHEMA compliance TO dbbackupusers; +GRANT SELECT ON ALL SEQUENCES IN SCHEMA compliance TO group "dbbackupusers"; +Grant select on ALL TABLES in SCHEMA compliance to group dbbackupusers; +ALTER DEFAULT PRIVILEGES IN SCHEMA compliance GRANT SELECT ON SEQUENCES TO group "dbbackupusers"; +ALTER DEFAULT PRIVILEGES IN SCHEMA compliance GRANT SELECT ON TABLES TO group dbbackupusers; + -- grants for all (implicit) sequences GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO group "secuadmins"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE, SELECT ON SEQUENCES TO group "secuadmins"; diff --git a/roles/database/files/sql/idempotent/fworch-texts.sql b/roles/database/files/sql/idempotent/fworch-texts.sql index 648bf897e..7d1ecdc4c 100644 --- a/roles/database/files/sql/idempotent/fworch-texts.sql +++ b/roles/database/files/sql/idempotent/fworch-texts.sql @@ -29,6 +29,8 @@ INSERT INTO txt VALUES ('set', 'German', 'Setzen'); INSERT INTO txt VALUES ('set', 'English', 'Set'); INSERT INTO txt VALUES ('add', 'German', 'Hinzufügen'); INSERT INTO txt VALUES ('add', 'English', 'Add'); +INSERT INTO txt VALUES ('commit_changes', 'German', 'Änderungen übernehmen'); +INSERT INTO txt VALUES ('commit_changes', 'English', 'Commit changes'); INSERT INTO txt VALUES ('autodiscover', 'German', 'Sync'); INSERT INTO txt VALUES ('autodiscover', 'English', 'Sync'); INSERT INTO txt VALUES ('assign', 'German', 'Zuordnen'); @@ -145,24 +147,26 @@ INSERT INTO txt VALUES ('whats_new_in_version', 'German', 'Was ist neu in Firew INSERT INTO txt VALUES ('whats_new_in_version', 'English', 'Release notes Firewall Orchestrator version'); INSERT INTO txt VALUES ('whats_new_facts', 'German', ' '); INSERT INTO txt VALUES ('whats_new_facts', 'English', ' '); @@ -170,10 +174,12 @@ INSERT INTO txt VALUES ('getting_started', 'German', 'Einstiegshilfe'); INSERT INTO txt VALUES ('getting_started', 'English', 'Quick start'); INSERT INTO txt VALUES 
('getting_started_facts', 'German', ' Die folgenden Hauptmenüpunkte stehen (je nach Rollenzugehörigkeit) zur Verfügung: '); @@ -2199,6 +2355,7 @@ INSERT INTO txt VALUES ('H1111', 'German', '
  • gateway (gw, firewall, fw, devi
  • remove: Mögliche Werte: true/false. Wenn "true", werden nur dezertifizierte Regeln gesucht
  • recertdisplay (recertdisp): Definiert den Zeitraum für die Vorausschau (in Tagen) für die nächste Rezertifizierung. Nur Regeln in diesem Zeitfenster werden gesucht.
  • lasthit (last-hit, last-used, last-usage, last-use): Filtern nach Regel-Nutzung - aktuell unterstützt für FortiManager und Check Point >=R80.
  • +
  • not-used-for-days (unused, unused-days, not-used): nicht genutzt seit der vorgegebenen Anzahl von Tagen oder gar nicht
  • fulltext (full, fulltextsearch, fts, text, textsearch)
  • '); INSERT INTO txt VALUES ('H1111', 'English', '
  • gateway (gw, firewall, fw, device, dev): Additionally to the specific device selection in the left sidebar @@ -2214,6 +2371,7 @@ INSERT INTO txt VALUES ('H1111', 'English', '
  • gateway (gw, firewall, fw, devi
  • remove: Possible Values: true/false. If "true", only decertified rules are searched
  • recertdisplay (recertdisp): Defines the lookahead period (in days) for next recertification. Only rules in this time range are searched.
  • lasthit (last-hit, last-used, last-usage, last-use): filter by rule usage - supported for FortiManager and Check Point >=R80 only.
  • +
  • not-used-for-days (unused, unused-days, not-used): not used for the given number of days or never
  • fulltext (full, fulltextsearch, fts, text, textsearch)
  • '); INSERT INTO txt VALUES ('H1131', 'German', '
  • and (&)
  • or (|)
  • not (!)
  • eq (=, :)
  • neq
  • (
  • )
  • '); @@ -2276,18 +2434,18 @@ INSERT INTO txt VALUES ('H1214', 'German', 'Regeländerungen des aktuellen INSERT INTO txt VALUES ('H1214', 'English', 'This year's Rule Changes: All rule change performed in the current year in the selected devices.'); INSERT INTO txt VALUES ('H1215', 'German', 'Aktuelle NAT Regeln: Aktuell aktive NAT-Regeln aller ausgewählten Devices.'); INSERT INTO txt VALUES ('H1215', 'English', 'Current NAT Rules: Currently active NAT rules of all selected devices.'); -INSERT INTO txt VALUES ('H1301', 'German', 'Direkt nach der Erzeugung oder vom Archiv aus können Reports in verschiedenen Ausgabeformaten exportiert werden:'); -INSERT INTO txt VALUES ('H1301', 'English', 'Directly after creation or from the archive reports can be exported to different output formats:'); +INSERT INTO txt VALUES ('H1301', 'German', 'Direkt nach der Erzeugung oder vom Archiv aus können Reports in verschiedenen Ausgabeformaten exportiert werden:'); +INSERT INTO txt VALUES ('H1301', 'English', 'Directly after creation or from the archive reports can be exported to different output formats:'); INSERT INTO txt VALUES ('H1302', 'German', '
  • pdf
  • html
  • csv (aktuell nur für die aufgelösten und technischen Report-Typen unterstützt)
  • json
  • '); INSERT INTO txt VALUES ('H1302', 'English', '
  • pdf
  • html
  • csv (currently only supported for resolved and technical report types)
  • json
  • '); INSERT INTO txt VALUES ('H1303', 'German', 'Nach betätigen des "Report exportieren"-Auswahlfeldes kann eines oder mehrere dieser Formate ausgewählt werden. Bei Aktivierung der pdf-Ausgabe wird desweiteren das Seitenformat zur Auswahl angeboten.
    - Auch kann der Report mit einem Namen versehen und archiviert werden. + Auch kann der Report mit einem Namen versehen und archiviert werden. Ein weiteres Ausgabefenster erlaubt dann das separate Abholen der ausgewählten Ausgabedateien. '); INSERT INTO txt VALUES ('H1303', 'English', 'After clicking the "Export Report" button one or more of them can be selected. When selecting the pdf export, the page format is offered for selection.
    - Also the possibility to name and save the report in the archive is given. + Also the possibility to name and save the report in the archive is given. Another Popup allows then to download the selected output files separately. '); INSERT INTO txt VALUES ('H1401', 'German', 'Im unteren Teil der Hauptseite werden die Ausgabedaten des generierten Reports dargestellt. @@ -2306,6 +2464,23 @@ INSERT INTO txt VALUES ('H1402', 'German', '
  • Nummer
  • Name
  • Que INSERT INTO txt VALUES ('H1402', 'English', '
  • Number
  • Name
  • Source Zone
  • Source
  • Destination Zone
  • Destination
  • Services
  • Action
  • Logging
  • Enabled
  • UID
  • Comment
  • '); +INSERT INTO txt VALUES ('H1403', 'German', 'Zusätzlich werden in einzelnen Reporttypen weitere Spalten dargestellt: + +'); +INSERT INTO txt VALUES ('H1403', 'English', 'Additionally in the different Report Types further columns are displayed: + +'); + INSERT INTO txt VALUES ('H1501', 'German', 'Hier werden die fixen Kriterien für die Auswahl zur Reporterstellung dargestellt. Weiteren Kriterien können über die Filterleiste hinzugefügt werden. '); @@ -2318,11 +2493,13 @@ INSERT INTO txt VALUES ('H1503', 'German', 'Auflistung aller verfügbaren D Die Ansicht kann für unterschiedliche Nutzer entsprechend der Mandantenzuordnung variieren. Für eine Reporterstellung muss hier eine Auswahl getroffen werden. Die dargestellten Devices können ein- oder ausgeklappt werden. Ab welcher Mindestanzahl die Darstellung zu Beginn eingeklappt ist, kann individuell in den Reporting-Einstellungen definiert werden. + Im Unbenutzte-Regel-Report werden Devices, die keine Nutztungsinformationen liefern, bei Reporterstellung automatisch deselektiert. '); INSERT INTO txt VALUES ('H1503', 'English', 'Display of all available devices. This view may differ for the different users according to the tenant assignments. For the creation of a report a selection out of them has to be done. The displayed devices can be collapsed or expanded. In the Report Settings it is possible to define the minimum number, where the display starts collapsed. + In the Unused Rules Report devices not delivering usage information are deselected automatically during report creation. '); INSERT INTO txt VALUES ('H1504', 'German', 'Anzeige der gewählten Reportzeit bzw. des gewählten Reportzeitraums in Abhängigkeit vom gewählten Report-Typ. Vorgabewerte sind "jetzt" bzw. "dieses Jahr". Über die "Ändern"-Schaltfläche kann dies in einem entsprechenden Popup-Fenster angepasst werden: @@ -2336,14 +2513,38 @@ INSERT INTO txt VALUES ('H1505', 'German', 'Für Report-Typen, welche die A INSERT INTO txt VALUES ('H1505', 'English', 'For report types requiring a report time there are two options: Selecting a particular time with the date/time picker or using the default value "now". '); -INSERT INTO txt VALUES ('H1506', 'German', 'Für Report-Typen, die Zeitintervalle benötigen, kann gewählt werden zwischen:'); -INSERT INTO txt VALUES ('H1506', 'English', 'For report types requiring a time range a selection can be done between:'); +INSERT INTO txt VALUES ('H1506', 'German', 'Für Report-Typen, die Zeitintervalle benötigen (nicht Unbenutzte-Regel- und Rezertifizierungs-Report), kann gewählt werden zwischen:'); +INSERT INTO txt VALUES ('H1506', 'English', 'For report types requiring a time range (not Unused Rule or Recertification Report) a selection can be done between:'); INSERT INTO txt VALUES ('H1507', 'German', 'Vordefinierte Abkürzungen "dieses Jahr", "letztes Jahr", "dieser Monat", "letzter Monat", "diese Woche", "letzte Woche", "heute" oder "gestern"'); INSERT INTO txt VALUES ('H1507', 'English', 'Predefined shortcuts "this year", "last year", "this month", "last month", "this week", "last week", "today" or "yesterday"'); INSERT INTO txt VALUES ('H1508', 'German', 'Zeitintervalle in Tagen, Wochen, Monaten oder Jahren relativ zum aktuellen Zeitpunkt'); INSERT INTO txt VALUES ('H1508', 'English', 'Time intervals in days, weeks, months or years in relation to the actual time'); INSERT INTO txt VALUES ('H1509', 'German', 'Absolute Start- und Endezeiten. 
Beide Grenzen können durch setzen der "offen"-Markierung ausser Kraft gesetzt werden.'); INSERT INTO txt VALUES ('H1509', 'English', 'Absolute start and end times. Both limits can be separately omitted by setting the "open" checkbox.'); +INSERT INTO txt VALUES ('H1510', 'German', 'Nur beim Unbenutzte-Regel-Report: Unbenutzt seit: Hier wird die Anzahl von Tagen eingegeben, seitdem die anzuzeigenden Regeln nicht mehr benutzt wurden. + Regeln, die noch keine letzte Nutzung protokolliert haben, werden ebenfalls dargestellt, falls sie älter als eine in den Reporting-Einstellungen definierte Toleranzzeit sind. + Dort kann auch der Default-Wert für den Zeitraum der Nichtbenutzung gesetzt werden. +'); +INSERT INTO txt VALUES ('H1510', 'English', 'Only for Unused Rules Report: Unused since: Here the number of days is given during which the rules to be displayed have not been used. + Rules never used are also displayed if their creation date is older than a tolerance interval defined in the Report Settings. + The default value for the unused interval can also be defined there. +'); +INSERT INTO txt VALUES ('H1511', 'German', 'Nur beim Rezertifizierungs-Report: Rezertifizierungsparameter +
  • Fällig in: Hier wird festgelegt, wie weit die Suche nach zu rezertifizierenden Regeln gehen soll (in Tagen). + Der Default-Wert kann sowohl vom Administrator in den Allgemeinen + als auch vom jeweiligen Nutzer in den Persönlichen Rezertifizierungseinstellungen festgelegt werden.
  • +
  • Eigentümer: Hier kann aus den dem Nutzer zugeordneten Eigentümerschaften ausgewählt werden.
  • +
  • Any-Regeln anzeigen: Wenn das Häkchen gesetzt ist, werden auch Regeln mit Ip 0.0.0.0 in Quelle oder Ziel dargestellt. + Beim deselektieren wird ein exkludierender Ausdruck zur Filterzeile hinzugefügt.
  • +'); +INSERT INTO txt VALUES ('H1511', 'English', 'Only for Recertification Report: Recertification Parameters +
  • Due within: Select how far ahead should be searched for rules to be recertified (in days). + The default value can be set by the administrator in the General + as well as in the Personal Recertification Settings by the user
  • . +
  • Owner: Select the certifying owner out of the ownerships related to the user.
  • +
  • Show any rules: If flag is set, rules with Ip 0.0.0.0 in source or destination are shown. + When deselecting an excluding statement is added to the filter line.
  • +'); INSERT INTO txt VALUES ('H1601', 'German', 'Die rechte Randleiste hat zwei Reiter: Unter "Alle" werden alle aktuell abgeholten Objekte dargestellt, während unter "Regel" nur die in der Reportausgabe ausgewählten Regeln gezeigt werden.
    Folgende Daten werden dargestellt, gruppiert nach den ausgewählten Devices: @@ -2361,8 +2562,8 @@ INSERT INTO txt VALUES ('H2001', 'German', 'Es können Reports für ein INSERT INTO txt VALUES ('H2001', 'English', 'Reports can be scheduled for a given time or as recurring tasks. Every user can administrate his own report schedules. '); -INSERT INTO txt VALUES ('H2011', 'German', 'Name: Der Reportname, der im Archiv wiederzufinden ist.'); -INSERT INTO txt VALUES ('H2011', 'English', 'Name: The report name to be found in the Archive.'); +INSERT INTO txt VALUES ('H2011', 'German', 'Name: Der Reportname, der im Archiv wiederzufinden ist.'); +INSERT INTO txt VALUES ('H2011', 'English', 'Name: The report name to be found in the Archive.'); INSERT INTO txt VALUES ('H2012', 'German', 'Startdatum und -zeit: Erste Ausführung des Terminauftrags. Bitte einige Minuten im voraus wählen, wenn die Ausführung noch heute erfolgen soll, da es einen Zeitverzug von einigen Minuten durch den Timer geben kann. '); @@ -2397,13 +2598,13 @@ INSERT INTO txt VALUES ('H2018', 'English', 'Count: Counts how many reports have INSERT INTO txt VALUES ('H3001', 'German', 'Hier sind die archivierten Reports mit Name sowie Informationen zu Erzeugungsdatum, Typ, Vorlage (nur bei termingesteuerten Reports), Eigentümer sowie eine kurze Beschreibung des Inhalts zu finden. Sie können zum einen durch Export manuell erzeugter Reports durch Setzen des "Archiv"-Kennzeichens in Export Report erzeugt werden. - Zum anderen finden sich hier auch die durch das Scheduling erzeugten Reports. + Zum anderen finden sich hier auch die durch das Scheduling erzeugten Reports. Die archivierten Reports können von hier heruntergeladen oder gelöscht werden. '); INSERT INTO txt VALUES ('H3001', 'English', 'Here the archived reports can be found with name and information about creation date, type, template (only at scheduled reports), owner and a short description about the content. They may be created on the one hand by exporting manually created reports with setting the flag "Archive" in Export Report. - On the other hand here also the reports created by the Scheduling can be found. + On the other hand here also the reports created by the Scheduling can be found. It is possible to download or delete these archived reports. '); @@ -2846,10 +3047,11 @@ INSERT INTO txt VALUES ('H5012', 'English', 'The chapter "Authorization" offers and Roles, additionally there is an overview of the owners. '); INSERT INTO txt VALUES ('H5013', 'German', 'Im Kapitel "Voreinstellungen" kann der Administrator Standardeinstellungen vornehmen, - die für alle Nutzer gelten, sowie die Passworteinstellungen definieren, welche für alle Passwortänderungen gültig sind. + die für alle Nutzer gelten, sowie die Email-, Importer- und + Passworteinstellungen definieren. '); INSERT INTO txt VALUES ('H5013', 'English', 'In the "Defaults" chapter the administrator can define Default Values applicable to all users - and set a Password Policy valid for all password changes. + and define email-, importer- and Password Policy settings. '); INSERT INTO txt VALUES ('H5014', 'German', 'Das Kapitel "Persönlich" ist für alle Nutzer zugänglich. Hier können das individuelle Password, die bevorzugte Sprache und Reporting-Einstellungen gesetzt werden. @@ -3331,10 +3533,10 @@ INSERT INTO txt VALUES ('H5411', 'German', 'Standardsprache: Die Sprache, die n INSERT INTO txt VALUES ('H5411', 'English', 'Default Language: The language which every user gets at first login. 
After login each user can define its own preferred language. '); -INSERT INTO txt VALUES ('H5412', 'German', 'Pro Abruf geholte Elemente: Definiert die (maximale) Anzahl der Objekte, die bei der Reporterzeugung und beim Aufbau der rechten Randleiste in einem Schritt geholt werden. +INSERT INTO txt VALUES ('H5412', 'German', 'UI - Pro Abruf geholte Elemente: Definiert die (maximale) Anzahl der Objekte, die bei der Reporterzeugung und beim Aufbau der rechten Randleiste in einem Schritt geholt werden. Dies kann genutzt werden, um die Performanz zu optimieren, wenn nötig. '); -INSERT INTO txt VALUES ('H5412', 'English', 'Elements per fetch: Defines the (maximum) number of objects which are fetched in one step for the report creation and the build up of the right sidebar. +INSERT INTO txt VALUES ('H5412', 'English', 'UI - Elements per fetch: Defines the (maximum) number of objects which are fetched in one step for the report creation and the build up of the right sidebar. This can be used to optimize performance if necessary. '); INSERT INTO txt VALUES ('H5413', 'German', 'Max initiale Abrufe rechte Randleiste: Definiert die (maximale) Anzahl an Abrufen während der Initialisierung der rechten Randleiste. @@ -3351,8 +3553,49 @@ INSERT INTO txt VALUES ('H5414', 'English', 'Completely auto-fill right sidebar: '); INSERT INTO txt VALUES ('H5415', 'German', 'Datenaufbewahrungszeit (in Tagen): Legt fest, wie lange die Daten in der Datenbank gehalten werden (wird noch nicht unterstützt).'); INSERT INTO txt VALUES ('H5415', 'English', 'Data retention time (in days): Defines how long the data is kept in the database (currently not supported).'); -INSERT INTO txt VALUES ('H5416', 'German', 'Importintervall (in Sekunden): Zeitintervall zwischen zwei Imports (wird noch nicht unterstützt)'); -INSERT INTO txt VALUES ('H5416', 'English', 'Import sleep time (in seconds): Time between import loops (currently not supported).'); +INSERT INTO txt VALUES ('H5416', 'German', ' + +'); +INSERT INTO txt VALUES ('H5416', 'English', ' + +'); INSERT INTO txt VALUES ('H5417', 'German', 'Rezertifizierungsintervall (in Tagen): Maximale Zeit, nach der eine Regel rezertifiziert werden soll.'); INSERT INTO txt VALUES ('H5417', 'English', 'Recertification Period (in days): Maximum time, after when a rule should be recertified.'); INSERT INTO txt VALUES ('H5418', 'German', 'Rezertifizierungserinnerungsintervall (in Tagen): Zeit vor dem Fälligkeitsdatum, ab der eine Regel als fällig hervorgehoben werden soll.'); @@ -3363,15 +3606,30 @@ INSERT INTO txt VALUES ('H5420', 'German', 'Frist zum Löschen der Regeln ( INSERT INTO txt VALUES ('H5420', 'English', 'Rule Removal Grace Period (in days): Maximum time the fwadmin has to remove the decertified rules.'); INSERT INTO txt VALUES ('H5421', 'German', 'Kommentar Pflichtfeld: Legt fest, dass das Kommentarfeld für Re- und Dezertifizierungen gefüllt sein muss.'); INSERT INTO txt VALUES ('H5421', 'English', 'Comment Required: A non-empty comment for the re- or decertification is required.'); - +INSERT INTO txt VALUES ('H5422', 'German', 'Devices zu Beginn eingeklappt ab: Legt fest, ab wievielen Devices (Managements + Gateways) diese in der linken Randleiste zunächst eingeklappt dargestellt werden.'); +INSERT INTO txt VALUES ('H5422', 'English', 'Devices collapsed at beginning from: defines from which number of devices (managements + gateways) they are displayed collapsed in the left sidebar at beginning.'); +INSERT INTO txt VALUES ('H5423', 'German', 'Nachrichten-Anzeigedauer (in Sekunden): 
legt fest, wie lange Erfolgs-Nachrichten dargestellt werden, bis sie automatisch ausgeblendet werden. + Fehler-Nachrichten erscheinen dreimal so lange. Beim Wert 0 werden die Nachrichten nicht automatisch ausgeblendet. + Die Nutzer-Meldungen können auch danach noch unter UI-Nachrichten eingesehen werden. +'); +INSERT INTO txt VALUES ('H5423', 'English', 'Message view time (in seconds): defines how long success messages are displayed, until they fade out automatically. + Error messages are displayed 3 times as long. Value 0 means that the messages do not fade out. + All user messages can still be reviewed at UI Messages. +'); +INSERT INTO txt VALUES ('H5424', 'German', 'Startzeit täglicher Check: legt die Zeit fest, wann der tägliche Check durchgeführt werden soll.'); +INSERT INTO txt VALUES ('H5424', 'English', 'Daily check start at: defines the time when the daily check should happen.'); +INSERT INTO txt VALUES ('H5426', 'German', 'Autodiscover-Intervall (in Stunden): legt das Intervall fest, in dem die Autodiscovery durchgeführt werden soll.'); +INSERT INTO txt VALUES ('H5426', 'English', 'Auto-discovery sleep time (in hours): defines the interval in which the autodiscovery should be performed.'); +INSERT INTO txt VALUES ('H5427', 'German', 'Autodiscover-Start: legt eine Bezugszeit fest, ab dem die Intervalle für die Autodiscovery gerechnet werden.'); +INSERT INTO txt VALUES ('H5427', 'English', 'Auto-discovery start at: defines a referential time from which the autodiscovery intervals are calculated.'); INSERT INTO txt VALUES ('H5428', 'German', 'Rezert Check - aktiv: aktviere bzw. deaktiviere regelmäßige Prüfungen zur Versendung von Benachrichtigungs- oder Eskalations-Emails an die Eigentümer.'); INSERT INTO txt VALUES ('H5428', 'English', 'Recert Check - active: enable or disable recurring recertification checks to send out notification or escalation emails to owners.'); INSERT INTO txt VALUES ('H5429', 'German', 'Rezert Check alle: Abstand der Prüfungen für den Versand von Benachrichtigungs- oder Eskalations-Emails an die Eigentümer.'); INSERT INTO txt VALUES ('H5429', 'English', 'Recert Check every: Interval between checks for recertification notifications.'); INSERT INTO txt VALUES ('H5430', 'German', 'Rezert Check - Email Titel: Titel der Benachrichtigungs-Email.'); INSERT INTO txt VALUES ('H5430', 'English', 'Recert Check - Email subject: Subject line of the notification email.'); -INSERT INTO txt VALUES ('H5446', 'German', 'Rezert Check - Text anstehend: Textinhalt der Benachrichtigungsmail bei demnächst anstehenden Rezertifizierungen.'); -INSERT INTO txt VALUES ('H5446', 'English', 'Recert Check - text upcoming: Email body of the notification email for upcoming recertifications.'); +INSERT INTO txt VALUES ('H5431', 'German', 'Der Administrator kann Vorgaben für Passwörter definieren, gegen die alle neuen Passwörter aller (internen) Nutzer geprüft werden.'); +INSERT INTO txt VALUES ('H5431', 'English', 'The admin user can define a password policy, against which all new passwords of all (internal) users are checked.'); INSERT INTO txt VALUES ('H5432', 'German', 'Rezert Check - Text überfällig: Textinhalt der Benachrichtigungsmail bei überfälligen Rezertifizierungen (Eskalation).'); INSERT INTO txt VALUES ('H5432', 'English', 'Recert Check - text overdue: Email body of the notification email for overdue recertifications (escalation).'); @@ -3392,30 +3650,6 @@ INSERT INTO txt VALUES ('H5439', 'English', 'Initial state for delete rule ticke INSERT INTO txt VALUES ('H5440', 'German', 
'Neuberechnen offene Rezertifizierungen: Auswahl, wann die Neuberechnung durchgeführt werden soll - beim Hochfahren, täglich via Scheduler oder jetzt (kann mehrere Minuten dauern).'); INSERT INTO txt VALUES ('H5440', 'English', 'Recalculate open recertifications: Choose, when to do this: at startup, daily via scheduler or now (this may take several minutes).'); -INSERT INTO txt VALUES ('H5422', 'German', 'Devices zu Beginn eingeklappt ab: Legt fest, ab wievielen Devices (Managements + Gateways) diese in der linken Randleiste zunächst eingeklappt dargestellt werden.'); -INSERT INTO txt VALUES ('H5422', 'English', 'Devices collapsed at beginning from: defines from which number of devices (managements + gateways) they are displayed collapsed in the left sidebar at beginning.'); -INSERT INTO txt VALUES ('H5423', 'German', 'Nachrichten-Anzeigedauer (in Sekunden): legt fest, wie lange Erfolgs-Nachrichten dargestellt werden, bis sie automatisch ausgeblendet werden. - Fehler-Nachrichten erscheinen dreimal so lange. Beim Wert 0 werden die Nachrichten nicht automatisch ausgeblendet. - Die Nutzer-Meldungen können auch danach noch unter UI-Nachrichten eingesehen werden. -'); -INSERT INTO txt VALUES ('H5423', 'English', 'Message view time (in seconds): defines how long success messages are displayed, until they fade out automatically. - Error messages are displayed 3 times as long. Value 0 means that the messages do not fade out. - All user messages can still be reviewed at UI Messages. -'); -INSERT INTO txt VALUES ('H5424', 'German', 'Startzeit täglicher Check: legt die Zeit fest, wann der tägliche Check durchgeführt werden soll.'); -INSERT INTO txt VALUES ('H5424', 'English', 'Daily check start at: defines the time when the daily check should happen.'); -INSERT INTO txt VALUES ('H5425', 'German', 'FW API - Pro Abruf geholte Elemente: Definiert die (maximale) Anzahl der Objekte, die beim Import über die FWO-API in einem Schritt geholt werden. - Dies kann genutzt werden, um die Performanz zu optimieren, wenn nötig. -'); -INSERT INTO txt VALUES ('H5425', 'English', 'FW API - Elements per fetch: Defines the (maximum) number of objects which are fetched in one step during import via the FWO-API. - This can be used to optimize performance if necessary. 
-'); -INSERT INTO txt VALUES ('H5426', 'German', 'Autodiscover-Intervall (in Stunden): legt das Intervall fest, in dem die Autodiscovery durchgeführt werden soll.'); -INSERT INTO txt VALUES ('H5426', 'English', 'Auto-discovery sleep time (in hours): defines the interval in which the autodiscovery should be performed.'); -INSERT INTO txt VALUES ('H5427', 'German', 'Autodiscover-Start: legt eine Bezugszeit fest, ab dem die Intervalle für die Autodiscovery gerechnet werden.'); -INSERT INTO txt VALUES ('H5427', 'English', 'Auto-discovery start at: defines a referential time from which the autodiscovery intervals are calculated.'); -INSERT INTO txt VALUES ('H5431', 'German', 'Der Administrator kann Vorgaben für Passwörter definieren, gegen die alle neuen Passwörter aller (internen) Nutzer geprüft werden.'); -INSERT INTO txt VALUES ('H5431', 'English', 'The admin user can define a password policy, against which all new passwords of all (internal) users are checked.'); INSERT INTO txt VALUES ('H5441', 'German', 'Mindestlänge: Minimale Länge des Passworts'); INSERT INTO txt VALUES ('H5441', 'English', 'Min Length: Minimal length of the password.'); INSERT INTO txt VALUES ('H5442', 'German', 'Grossbuchstaben enthalten: Das Passwort muss mindestens einen Grossbuchstaben enthalten.'); @@ -3426,6 +3660,16 @@ INSERT INTO txt VALUES ('H5444', 'German', 'Ziffern enthalten: Das Passwort mus INSERT INTO txt VALUES ('H5444', 'English', 'Number Required: There has to be at least one number in the password.'); INSERT INTO txt VALUES ('H5445', 'German', 'Sonderzeichen enthalten: Das Passwort muss mindestens ein Sonderzeichen enthalten. Mögliche Werte: !?(){}=~$%&#*-+.,_'); INSERT INTO txt VALUES ('H5445', 'English', 'Special Characters Required: There has to be at least one special character in the password. 
Possible values are: !?(){}=~$%&#*-+.,_'); +INSERT INTO txt VALUES ('H5446', 'German', 'Rezert Check - Text anstehend: Textinhalt der Benachrichtigungsmail bei demnächst anstehenden Rezertifizierungen.'); +INSERT INTO txt VALUES ('H5446', 'English', 'Recert Check - text upcoming: Email body of the notification email for upcoming recertifications.'); +INSERT INTO txt VALUES ('H5447', 'German', 'Als unbenutzt gewertet nach (in Tagen): Gibt den Zeitpunkt an, vor dem die letzte Nutzung der Regel für den Unbenutzte-Regel-Report in der Vergangenheit liegen muss.'); +INSERT INTO txt VALUES ('H5447', 'English', 'Regarded as unused from (in days): Defines the point in time, before which the last usage has to be in the past for the Unused Rules Report.'); +INSERT INTO txt VALUES ('H5448', 'German', 'Toleranz ab Erzeugungsdatum (in Tagen): Noch niemals benutzte Regeln werden im Unbenutzte-Regel-Report nur berücksichtigt, wenn sie vor dem durch den hier definierten Toleranzwert festgelegten Zeitpunkt erzeugt wurden.'); +INSERT INTO txt VALUES ('H5448', 'English', 'Tolerance from creation date (in days): Never used rules are only regarded in the Unused Rules Report, if they have been created before the point in time defined by this tolerance value.'); +INSERT INTO txt VALUES ('H5449', 'German', 'Sitzungs-Timeout (in Minuten): Zeit, nach der ein Nutzer automatisch aus der Sitzung ausgeloggt wird.'); +INSERT INTO txt VALUES ('H5449', 'English', 'Session timeout (in minutes): Time after which a user is logged out automatically.'); +INSERT INTO txt VALUES ('H5450', 'German', 'Benachrichtigung vor Sitzungs-Timeout (in Minuten): Intervall vor dem automatischen Logout, in dem eine Warnung ausgegeben wird.'); +INSERT INTO txt VALUES ('H5450', 'English', 'Warning before session timeout (in minutes): Interval before automatic logout when a warning message is displayed.'); INSERT INTO txt VALUES ('H5451', 'German', 'Jeder Nutzer (ausser Demo-Nutzer) kann sein eigenes Passwort ändern.
    Bitte das alte Passwort einmal und das neue Passwort zweimal eingeben, um Eingabefehler zu vermeiden. Das neue Passwort muss sich vom alten unterscheiden und wird gegen die Passworteinstellungen geprüft. @@ -3434,6 +3678,11 @@ INSERT INTO txt VALUES ('H5451', 'English', 'Every user (except demo user) can c Please insert the old password once and the new password twice to avoid input mistakes. The new password has to be different from the old one and is checked against the Password Policy. '); +INSERT INTO txt VALUES ('H5452', 'German', 'Max erlaubte Importdauer (in Stunden): Obergrenze, welche Importdauer im täglichen Check noch als akzeptabel gewertet wird.'); +INSERT INTO txt VALUES ('H5452', 'English', 'Max allowed import duration (in hours): Upper limit for the accepted import duration in the daily check.'); +INSERT INTO txt VALUES ('H5453', 'German', 'Max erlaubtes Importintervall (in Stunden): Obergrenze, welcher Abstand zwischen zwei Imports im täglichen Check noch akzeptiert wird.'); +INSERT INTO txt VALUES ('H5453', 'English', 'Max import interval (in hours): Upper limit for the accepted interval between two imports in the daily check.'); + INSERT INTO txt VALUES ('H5461', 'German', 'Jeder Nutzer kann seine eigene bevorzugte Sprache für die Anwendung einstellen.
    Alle Texte werden in dieser Sprache dargestellt, soweit verfügbar. Wenn nicht, wird die Standardsprache verwendet. Wenn der Text auch dort nicht verfügbar ist, wird Englisch genutzt. Die Standardsprache beim ersten Anmelden kann vom Admin für alle Nutzer in den Standardeinstellungen definiert werden.

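The help text for H5461 above describes a three-step language fallback: the user's preferred language, then the configured default language, then English. Below is a minimal sketch of that lookup order, purely for illustration; the dictionary layout and the function name pick_text are hypothetical and not part of the UI code.

```python
def pick_text(texts: dict, user_lang: str, default_lang: str) -> str:
    # Fallback chain described in the help text: preferred language first,
    # then the admin-defined default language, then English.
    for lang in (user_lang, default_lang, 'English'):
        if lang in texts:
            return texts[lang]
    return ''

# Hypothetical usage: a text that exists only in German and English
texts = {'German': 'Einstellungen', 'English': 'Settings'}
print(pick_text(texts, user_lang='French', default_lang='German'))  # -> 'Einstellungen'
```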
    @@ -3456,7 +3705,27 @@ INSERT INTO txt VALUES ('H5481', 'German', 'Ein Rezertifizierer kann einige per INSERT INTO txt VALUES ('H5481', 'English', 'A recertifier can overwrite some personal settings for the recertification report. The default value is set by the admin in the Default Settings. '); - +INSERT INTO txt VALUES ('H5491', 'German', 'Firewall Orchestrator kann Benachrichtigungen versenden, z.B. für anstehende Rezertifizierungen oder wenn beim Import + Änderungen festgestellt wurden. + +'); +INSERT INTO txt VALUES ('H5491', 'English', 'Firewall Orchestrator is able to send out notifications, e.g. for upcoming recertifications or when an import found changes in the firewall configuration.
    + +'); +INSERT INTO txt VALUES ('H5495', 'German', 'Die folgenden Einstellungen wirken sich auf das Import-Modul (python) aus.'); +INSERT INTO txt VALUES ('H5495', 'English', 'The following settings apply to the Import Module (python)'); INSERT INTO txt VALUES ('H5501', 'German', 'Aktionen müssen zuerst in den Einstellungen definiert werden und können dann den jeweiligen Stati zugeordnet werden. Die Aktion wird dann bei Eintreffen der hier definierten Bedingungen angeboten bzw. ausgeführt. '); diff --git a/roles/database/files/upgrade/6.5.0.sql b/roles/database/files/upgrade/6.5.0.sql new file mode 100644 index 000000000..156e57279 --- /dev/null +++ b/roles/database/files/upgrade/6.5.0.sql @@ -0,0 +1,53 @@ +--- Compliance Tables --- +create schema if not exists compliance; + +create table if not exists compliance.network_zone +( + id BIGSERIAL PRIMARY KEY, + name VARCHAR NOT NULL, + description VARCHAR NOT NULL, + super_network_zone_id bigint, + owner_id bigint +); + +create table if not exists compliance.network_zone_communication +( + from_network_zone_id bigint NOT NULL, + to_network_zone_id bigint NOT NULL +); + +create table if not exists compliance.ip_range +( + network_zone_id bigint NOT NULL, + ip_range_start inet NOT NULL, + ip_range_end inet NOT NULL, + PRIMARY KEY(network_zone_id, ip_range_start, ip_range_end) +); + + +--- Compliance Foreign Keys --- + +--- compliance.ip_range --- +ALTER TABLE compliance.ip_range DROP CONSTRAINT IF EXISTS compliance_ip_range_network_zone_foreign_key; +ALTER TABLE compliance.ip_range ADD CONSTRAINT compliance_ip_range_network_zone_foreign_key FOREIGN KEY (network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +--- compliance.network_zone --- +ALTER TABLE compliance.network_zone DROP CONSTRAINT IF EXISTS compliance_super_zone_foreign_key; +ALTER TABLE compliance.network_zone ADD CONSTRAINT compliance_super_zone_foreign_key FOREIGN KEY (super_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +--- compliance.network_zone_communication --- +ALTER TABLE compliance.network_zone_communication DROP CONSTRAINT IF EXISTS compliance_from_network_zone_communication_foreign_key; +ALTER TABLE compliance.network_zone_communication DROP CONSTRAINT IF EXISTS compliance_to_network_zone_communication_foreign_key; +ALTER TABLE compliance.network_zone_communication ADD CONSTRAINT compliance_from_network_zone_communication_foreign_key FOREIGN KEY (from_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; +ALTER TABLE compliance.network_zone_communication ADD CONSTRAINT compliance_to_network_zone_communication_foreign_key FOREIGN KEY (to_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE; + + +--- Compliance Constraints --- +CREATE EXTENSION IF NOT EXISTS btree_gist; +--- prevent overlapping ip address ranges in the same zone +ALTER TABLE compliance.ip_range DROP CONSTRAINT IF EXISTS exclude_overlapping_ip_ranges; +ALTER TABLE compliance.ip_range ADD CONSTRAINT exclude_overlapping_ip_ranges +EXCLUDE USING gist ( + network_zone_id WITH =, + numrange(ip_range_start - '0.0.0.0'::inet, ip_range_end - '0.0.0.0'::inet, '[]') WITH && +); diff --git a/roles/database/files/upgrade/6.5.1.sql b/roles/database/files/upgrade/6.5.1.sql new file mode 100644 index 000000000..a36e914c7 --- /dev/null +++ b/roles/database/files/upgrade/6.5.1.sql @@ -0,0 +1,2 @@ +insert into config (config_key, config_value, 
config_user) VALUES ('unusedTolerance', '400', 0) ON CONFLICT DO NOTHING; +insert into config (config_key, config_value, config_user) VALUES ('creationTolerance', '90', 0) ON CONFLICT DO NOTHING; diff --git a/roles/database/tasks/install-database.yml b/roles/database/tasks/install-database.yml index 261957446..54feb32d4 100644 --- a/roles/database/tasks/install-database.yml +++ b/roles/database/tasks/install-database.yml @@ -85,7 +85,7 @@ - name: include table creation pre ansible 2.10 include_tasks: install-db-base-ansible-pre2.10.yml when: ansible_version.full is version('2.10', '<') - + - name: create db users with group memberships import_tasks: create-users.yml when: installation_mode == "new" diff --git a/roles/database/tasks/main.yml b/roles/database/tasks/main.yml index dbed28527..34af40108 100644 --- a/roles/database/tasks/main.yml +++ b/roles/database/tasks/main.yml @@ -147,6 +147,7 @@ loop: - csv - sql + tags: [ 'test' ] - name: create tablespace directory file: diff --git a/roles/database/tasks/upgrade_database_new.yml b/roles/database/tasks/upgrade_database_new.yml index 605ba5689..a34bd5708 100644 --- a/roles/database/tasks/upgrade_database_new.yml +++ b/roles/database/tasks/upgrade_database_new.yml @@ -4,8 +4,6 @@ db: "{{ fworch_db_name }}" path_to_script: "{{ database_install_dir }}/upgrade/{{ item }}.sql" as_single_query: "{{ postgresql_query_as_single_query }}" - # register: res - # when: not (res.changed|d(false)) loop: "{{ upgrade_files | sort }}" become: true ignore_errors: false diff --git a/roles/database/tasks/upgrade_database_old.yml b/roles/database/tasks/upgrade_database_old.yml index d6a46e5d4..d7586b00a 100644 --- a/roles/database/tasks/upgrade_database_old.yml +++ b/roles/database/tasks/upgrade_database_old.yml @@ -3,8 +3,6 @@ postgresql_query: db: "{{ fworch_db_name }}" path_to_script: "{{ database_install_dir }}/upgrade/{{ item }}.sql" - # register: res - # when: not (res.changed|d(false)) loop: "{{ upgrade_files | sort }}" ignore_errors: false become: true diff --git a/roles/importer/files/importer/checkpointR8x/cp_const.py b/roles/importer/files/importer/checkpointR8x/cp_const.py index e44efd6e3..7c38cccc6 100644 --- a/roles/importer/files/importer/checkpointR8x/cp_const.py +++ b/roles/importer/files/importer/checkpointR8x/cp_const.py @@ -28,7 +28,7 @@ api_obj_types = nw_obj_table_names + svc_obj_table_names # all obj table names to look at during import cp_specific_object_types = [ # used for fetching enrichment data via "get object" separately (no specific API call) - 'simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiVsxClusterNetobj', 'CpmiVsxClusterMember', + 'simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiVsxClusterNetobj', 'CpmiVsxClusterMember', 'CpmiVsNetobj', 'CpmiAnyObject', 'CpmiClusterMember', 'CpmiGatewayPlain', 'CpmiHostCkp', 'CpmiGatewayCluster', 'checkpoint-host', 'cluster-member' ] diff --git a/roles/importer/files/importer/checkpointR8x/cp_enrich.py b/roles/importer/files/importer/checkpointR8x/cp_enrich.py new file mode 100644 index 000000000..f54e21ba7 --- /dev/null +++ b/roles/importer/files/importer/checkpointR8x/cp_enrich.py @@ -0,0 +1,168 @@ +import sys +from common import importer_base_dir +from fwo_log import getFwoLogger +sys.path.append(importer_base_dir + '/checkpointR8x') +import time +import cp_getter +import fwo_globals +import cp_const +import cp_network + + +################# enrich ####################### +def enrich_config (config, mgm_details, limit=150, 
details_level=cp_const.details_level, noapi=False, sid=None): + + logger = getFwoLogger() + base_url = 'https://' + mgm_details['hostname'] + ':' + str(mgm_details['port']) + '/web_api/' + nw_objs_from_obj_tables = [] + svc_objs_from_obj_tables = [] + starttime = int(time.time()) + + # do nothing for empty configs + if config == {}: + return 0 + + ################################################################################# + # get object data which is only contained as uid in config by making additional api calls + # get all object uids (together with type) from all rules in fields src, dst, svc + nw_uids_from_rulebase = [] + svc_uids_from_rulebase = [] + + for rulebase in config['rulebases'] + config['nat_rulebases']: + if fwo_globals.debug_level>5: + if 'layername' in rulebase: + logger.debug ( "Searching for all uids in rulebase: " + rulebase['layername'] ) + cp_getter.collect_uids_from_rulebase(rulebase, nw_uids_from_rulebase, svc_uids_from_rulebase, "top_level") + + # remove duplicates from uid lists + nw_uids_from_rulebase = list(set(nw_uids_from_rulebase)) + svc_uids_from_rulebase = list(set(svc_uids_from_rulebase)) + + # get all uids in objects tables + for obj_table in config['object_tables']: + nw_objs_from_obj_tables.extend(cp_getter.get_all_uids_of_a_type(obj_table, cp_const.nw_obj_table_names)) + svc_objs_from_obj_tables.extend(cp_getter.get_all_uids_of_a_type(obj_table, cp_const.svc_obj_table_names)) + + # identify all objects (by type) that are missing in objects tables but present in rulebase + missing_nw_object_uids = cp_getter.get_broken_object_uids(nw_objs_from_obj_tables, nw_uids_from_rulebase) + missing_svc_object_uids = cp_getter.get_broken_object_uids(svc_objs_from_obj_tables, svc_uids_from_rulebase) + + # adding the uid of the Original object for natting: + missing_nw_object_uids.append(cp_const.original_obj_uid) + missing_svc_object_uids.append(cp_const.original_obj_uid) + + if fwo_globals.debug_level>4: + logger.debug ( "found missing nw objects: '" + ",".join(missing_nw_object_uids) + "'" ) + logger.debug ( "found missing svc objects: '" + ",".join(missing_svc_object_uids) + "'" ) + + if noapi == False: + # if sid is None: + # TODO: why is the re-generation of a new sid necessary here?
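An aside on the enrichment step above: enrich_config() first collects every uid referenced in the rulebases, de-duplicates them, and then subtracts the uids already present in the object tables; only the remainder is fetched one by one via show-object. Below is a minimal standalone sketch of that set logic. The uid values are invented, and the list comprehension only mirrors what get_broken_object_uids() returns; it is not the project code.

```python
# Invented sample data, illustrating the set logic of enrich_config() above.
referenced_uids = ['uid-a', 'uid-b', 'uid-b', 'uid-c']  # collected from rule src/dst/svc fields
known_uids = {'uid-a', 'uid-c'}                         # collected from the object tables

referenced_uids = list(set(referenced_uids))            # de-duplicate, as in enrich_config()

# what get_broken_object_uids() effectively yields: referenced but unknown uids
missing_uids = [uid for uid in referenced_uids if uid not in known_uids]
print(missing_uids)                                     # -> ['uid-b']
```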
+ # if mgm_details['domainUid'] != None: + # api_domain = mgm_details['domainUid'] + # else: + # api_domain = mgm_details['configPath'] + + # sid = cp_getter.login(mgm_details['import_credential']['user'],mgm_details['import_credential']['secret'],mgm_details['hostname'],mgm_details['port'],api_domain) + # logger.debug ( "re-logged into api" ) + + # if an object is not there: + # make api call: show object details-level full uid "" and add object to respective json + for missing_obj in missing_nw_object_uids: + show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj} + logger.debug ( "fetching obj with uid: " + missing_obj) + obj = cp_getter.cp_api_call(base_url, 'show-object', show_params_host, sid) + if 'object' in obj: + obj = obj['object'] + if (obj['type'] == 'CpmiAnyObject'): + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': 'any nw object checkpoint (hard coded)', + 'type': 'CpmiAnyObject', 'ipv4-address': '0.0.0.0/0', + } ] } ] } + config['object_tables'].append(json_obj) + elif (obj['type'] == 'simple-gateway' or obj['type'] == 'CpmiGatewayPlain' or obj['type'] == 'interop'): + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cp_network.get_ip_of_obj(obj), + } ] } ] } + config['object_tables'].append(json_obj) + elif obj['type'] == 'multicast-address-range': + logger.debug("found multicast-address-range: " + obj['name'] + " (uid:" + obj['uid']+ ")") + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cp_network.get_ip_of_obj(obj), + } ] } ] } + config['object_tables'].append(json_obj) + elif (obj['type'] == 'CpmiVsClusterMember' or obj['type'] == 'CpmiVsxClusterMember'): + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cp_network.get_ip_of_obj(obj), + } ] } ] } + config['object_tables'].append(json_obj) + logger.debug ('missing obj: ' + obj['name'] + obj['type']) + elif (obj['type'] == 'Global'): + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': '0.0.0.0/0', + } ] } ] } + config['object_tables'].append(json_obj) + logger.debug ('missing obj: ' + obj['name'] + obj['type']) + elif (obj['type'] == 'updatable-object'): + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': obj['comments'], 'type': 'group' #, 'ipv4-address': '0.0.0.0/0', + } ] } ] } + config['object_tables'].append(json_obj) + logger.debug ('missing obj: ' + obj['name'] + obj['type']) + elif (obj['type'] == 'Internet'): + json_obj = {"object_type": "hosts", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': obj['comments'], 'type': 'network', 'ipv4-address': '0.0.0.0/0', + } ] } ] } + config['object_tables'].append(json_obj) + elif (obj['type'] == 'access-role'): + pass # ignorning user objects + else: + logger.warning ( "missing 
nw obj of unexpected type '" + obj['type'] + "': " + missing_obj ) + logger.debug ( "missing nw obj: " + missing_obj + " added" ) + else: + logger.warning("could not get the missing object with uid=" + missing_obj + " from CP API") + + for missing_obj in missing_svc_object_uids: + show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj} + obj = cp_getter.cp_api_call(base_url, 'show-object', show_params_host, sid) + if 'object' in obj: + obj = obj['object'] + if (obj['type'] == 'CpmiAnyObject'): + json_obj = {"object_type": "services-other", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': 'any svc object checkpoint (hard coded)', + 'type': 'service-other', 'ip-protocol': '0' + } ] } ] } + config['object_tables'].append(json_obj) + elif (obj['type'] == 'Global'): + json_obj = {"object_type": "services-other", "object_chunks": [ { + "objects": [ { + 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], + 'comments': 'Original svc object checkpoint (hard coded)', + 'type': 'service-other', 'ip-protocol': '0' + } ] } ] } + config['object_tables'].append(json_obj) + else: + logger.warning ( "missing svc obj (uid=" + missing_obj + ") of unexpected type \"" + obj['type'] +"\"" ) + logger.debug ( "missing svc obj: " + missing_obj + " added") + + # logout_result = cp_getter.cp_api_call(base_url, 'logout', {}, sid) + + logger.debug ( "checkpointR8x/enrich_config - duration: " + str(int(time.time()) - starttime) + "s" ) + + return 0 diff --git a/roles/importer/files/importer/checkpointR8x/getter.py b/roles/importer/files/importer/checkpointR8x/cp_getter.py similarity index 89% rename from roles/importer/files/importer/checkpointR8x/getter.py rename to roles/importer/files/importer/checkpointR8x/cp_getter.py index 1a9afa8e8..6c455c12e 100644 --- a/roles/importer/files/importer/checkpointR8x/getter.py +++ b/roles/importer/files/importer/checkpointR8x/cp_getter.py @@ -193,7 +193,10 @@ def collect_uids_from_rulebase(rulebase, nw_uids_found, svc_uids_found, debug_te chunk_name = 'nat_rule_chunks' else: for rule in rulebase: - collect_uids_from_rule(rule, nw_uids_found, svc_uids_found) + if 'rulebase' in rule: + collect_uids_from_rulebase(rule['rulebase'], nw_uids_found, svc_uids_found, debug_text + '.') + else: + collect_uids_from_rule(rule, nw_uids_found, svc_uids_found) return for layer_chunk in rulebase[chunk_name]: if 'rulebase' in layer_chunk: @@ -239,37 +242,21 @@ def get_broken_object_uids(all_uids_from_obj_tables, all_uids_from_rules): return list(set(broken_uids)) -def get_inline_layer_names_from_rulebase(rulebase, inline_layers): - logger = getFwoLogger() - if 'layerchunks' in rulebase: - for chunk in rulebase['layerchunks']: - if 'rulebase' in chunk: - for rules_chunk in chunk['rulebase']: - get_inline_layer_names_from_rulebase(rules_chunk, inline_layers) - else: - if 'rulebase' in rulebase: - # add section header, but only if it does not exist yet (can happen by chunking a section) - for rule in rulebase['rulebase']: - if 'inline-layer' in rule: - inline_layers.append(rule['inline-layer']['name']) - if 'name' in rule and rule['name'] == "Placeholder for domain rules": - logger.debug ("getter - found domain rules reference with uid " + rule["uid"]) - - if 'rule-number' in rulebase: # not a rulebase but a single rule - if 'inline-layer' in rulebase: - inline_layers.append(rulebase['inline-layer']['name']) - # get_inline_layer_names_from_rulebase(rulebase, inline_layers) - - -def 
get_layer_from_api_as_dict (api_v_url, sid, show_params_rules, layername): +def get_layer_from_api_as_dict (api_v_url, sid, show_params_rules, layername, access_type='access', collection_type='rulebase'): + # access_type: access / nat + # collection_type: rulebase / layer logger = getFwoLogger() current_layer_json = { "layername": layername, "layerchunks": [] } current=0 total=current+1 while (current6: - logger.debug ( "get_layer_from_api_as_dict current offset: "+ str(current) ) + + ################################################################################# + # adding inline and domain layers (if they exist) + add_inline_layers (current_layer_json, api_v_url, sid, show_params_rules) + return current_layer_json -def get_nat_rules_from_api_as_dict (api_host, api_port, api_v_url, sid, show_params_rules): +def add_inline_layers (rulebase, api_v_url, sid, show_params_rules, access_type='access', collection_type='layer'): + + if 'layerchunks' in rulebase: + for chunk in rulebase['layerchunks']: + if 'rulebase' in chunk: + for rules_chunk in chunk['rulebase']: + add_inline_layers(rules_chunk, api_v_url, sid, show_params_rules) + else: + if 'rulebase' in rulebase: + rulebase_idx = 0 + for rule in rulebase['rulebase']: + if 'inline-layer' in rule: + inline_layer_name = rule['inline-layer']['name'] + if fwo_globals.debug_level>5: + logger.debug ( "found inline layer " + inline_layer_name ) + inline_layer = get_layer_from_api_as_dict (api_v_url, sid, show_params_rules, inline_layer_name, access_type=access_type, collection_type=collection_type) + rulebase['rulebase'][rulebase_idx+1:rulebase_idx+1] = inline_layer['layerchunks'] #### insert inline layer here + rulebase_idx += len(inline_layer['layerchunks']) + + if 'name' in rule and rule['name'] == "Placeholder for domain rules": + logger.debug ("getter - found domain rules reference with uid " + rule["uid"]) + rulebase_idx += 1 + + +def get_nat_rules_from_api_as_dict (api_v_url, sid, show_params_rules): logger = getFwoLogger() nat_rules = { "nat_rule_chunks": [] } current=0 diff --git a/roles/importer/files/importer/checkpointR8x/parse_network.py b/roles/importer/files/importer/checkpointR8x/cp_network.py similarity index 70% rename from roles/importer/files/importer/checkpointR8x/parse_network.py rename to roles/importer/files/importer/checkpointR8x/cp_network.py index 721930aef..be58428ba 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_network.py +++ b/roles/importer/files/importer/checkpointR8x/cp_network.py @@ -1,11 +1,12 @@ from fwo_log import getFwoLogger import json import cp_const -from cpcommon import get_ip_of_obj from fwo_const import list_delimiter +import fwo_alert, fwo_api +import ipaddress -def parse_network_objects_to_json(full_config, config2import, import_id, mgm_id=0, debug_level=0): +def normalize_network_objects(full_config, config2import, import_id, mgm_id=0, debug_level=0): nw_objects = [] for obj_table in full_config['object_tables']: @@ -114,3 +115,44 @@ def add_member_names_for_nw_group(idx, nw_objects): member_names += member_name + list_delimiter group['obj_member_names'] = member_names[:-1] nw_objects.insert(idx, group) + + +def validate_ip_address(address): + try: + # ipaddress.ip_address(address) + ipaddress.ip_network(address) + return True + # print("IP address {} is valid. 
The object returned is {}".format(address, ip)) + except ValueError: + return False + # print("IP address {} is not valid".format(address)) + + +def get_ip_of_obj(obj, mgm_id=None): + if 'ipv4-address' in obj: + ip_addr = obj['ipv4-address'] + elif 'ipv6-address' in obj: + ip_addr = obj['ipv6-address'] + elif 'subnet4' in obj: + ip_addr = obj['subnet4'] + '/' + str(obj['mask-length4']) + elif 'subnet6' in obj: + ip_addr = obj['subnet6'] + '/' + str(obj['mask-length6']) + elif 'ipv4-address-first' in obj and 'ipv4-address-last' in obj: + ip_addr = obj['ipv4-address-first'] + '-' + str(obj['ipv4-address-last']) + elif 'ipv6-address-first' in obj and 'ipv6-address-last' in obj: + ip_addr = obj['ipv6-address-first'] + '-' + str(obj['ipv6-address-last']) + else: + ip_addr = None + + ## fix malformed ip addresses (should not regularly occur and constitutes a data issue in CP database) + if ip_addr is None or ('type' in obj and (obj['type'] == 'address-range' or obj['type'] == 'multicast-address-range')): + pass # ignore None and ranges here + elif not validate_ip_address(ip_addr): + alerter = fwo_alert.getFwoAlerter() + alert_description = "object is not a valid ip address (" + str(ip_addr) + ")" + fwo_api.create_data_issue(alerter['fwo_api_base_url'], alerter['jwt'], severity=2, obj_name=obj['name'], object_type=obj['type'], description=alert_description, mgm_id=mgm_id) + alert_description = "object '" + obj['name'] + "' (type=" + obj['type'] + ") is not a valid ip address (" + str(ip_addr) + ")" + fwo_api.setAlert(alerter['fwo_api_base_url'], alerter['jwt'], title="import error", severity=2, role='importer', \ + description=alert_description, source='import', alertCode=17, mgm_id=mgm_id) + ip_addr = '0.0.0.0/32' # setting syntactically correct dummy ip + return ip_addr diff --git a/roles/importer/files/importer/checkpointR8x/parse_rule.py b/roles/importer/files/importer/checkpointR8x/cp_rule.py similarity index 68% rename from roles/importer/files/importer/checkpointR8x/parse_rule.py rename to roles/importer/files/importer/checkpointR8x/cp_rule.py index b674cc437..b52664a1b 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_rule.py +++ b/roles/importer/files/importer/checkpointR8x/cp_rule.py @@ -1,60 +1,52 @@ from asyncio.log import logger from fwo_log import getFwoLogger import json -import cp_const, cpcommon +import cp_const import fwo_const -from fwo_const import list_delimiter +import fwo_globals +from fwo_const import list_delimiter, default_section_header_text from fwo_base import sanitize from fwo_exception import ImportRecursionLimitReached +uid_to_name_map = {} -def add_section_header_rule_in_json(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): - section_header_uids.append(sanitize(rule_uid)) - rule = { - "control_id": int(import_id), - "rule_num": int(rule_num), - "rulebase_name": sanitize(layer_name), - # rule_ruleid - "rule_disabled": False, - "rule_src_neg": False, - "rule_src": "Any", - "rule_src_refs": sanitize(cp_const.any_obj_uid), - "rule_dst_neg": False, - "rule_dst": "Any", - "rule_dst_refs": sanitize(cp_const.any_obj_uid), - "rule_svc_neg": False, - "rule_svc": "Any", - "rule_svc_refs": sanitize(cp_const.any_obj_uid), - "rule_action": "Accept", - "rule_track": "Log", - "rule_installon": "Policy Targets", - "rule_time": "Any", - "rule_implied": False, - # "rule_comment": None, - # rule_name - "rule_uid": sanitize(rule_uid), - "rule_head_text": sanitize(section_name), - # rule_from_zone - # rule_to_zone - # 
rule_last_change_admin - "parent_rule_uid": sanitize(parent_uid) - } - rulebase.append(rule) - - -def add_domain_rule_header_rule_in_json(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): - add_section_header_rule_in_json(rulebase, section_name, layer_name, - import_id, rule_uid, rule_num, section_header_uids, parent_uid) +def normalize_rulebases_top_level (full_config, current_import_id, config2import): + logger = getFwoLogger() + target_rulebase = [] + rule_num = 0 + parent_uid="" + section_header_uids=[] -def resolve_uid_to_name(nw, config2import): + # fill uid_to_name_map: for nw_obj in config2import['network_objects']: - if nw_obj['obj_uid']==nw: - return nw_obj['obj_name'] - return nw + uid_to_name_map[nw_obj['obj_uid']] = nw_obj['obj_name'] + + rb_range = range(len(full_config['rulebases'])) + for rb_id in rb_range: + # if current_layer_name == args.rulebase: + if fwo_globals.debug_level>3: + logger.debug("parsing layer " + full_config['rulebases'][rb_id]['layername']) + + # parse access rules + rule_num = parse_rulebase( + full_config['rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'], + current_import_id, rule_num, section_header_uids, parent_uid, config2import) + # now parse the nat rulebase + + # parse nat rules + if len(full_config['nat_rulebases'])>0: + if len(full_config['nat_rulebases']) != len(rb_range): + logger.warning('get_config - found ' + str(len(full_config['nat_rulebases'])) + + ' nat rulebases and ' + str(len(rb_range)) + ' access rulebases') + else: + rule_num = parse_nat_rulebase( + full_config['nat_rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'], + current_import_id, rule_num, section_header_uids, parent_uid, config2import) + return target_rulebase -def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_num, parent_uid, config2import, debug_level=0): +def parse_single_rule(src_rule, rulebase, layer_name, import_id, rule_num, parent_uid, config2import, debug_level=0): logger = getFwoLogger() # reference to domain rule layer, filling up basic fields if 'type' in src_rule and src_rule['type'] != 'place-holder': @@ -75,8 +67,11 @@ def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_nu src['networks'] + list_delimiter else: # more than one source for nw in src['networks']: - nw_resolved = resolve_uid_to_name(nw, config2import) - rule_src_name += src["name"] + '@' + nw_resolved + list_delimiter + nw_resolved = resolve_uid_to_name(nw) + if nw_resolved == "": + rule_src_name += src["name"] + list_delimiter + else: + rule_src_name += src["name"] + '@' + nw_resolved + list_delimiter else: # standard network objects as source rule_src_name += src["name"] + list_delimiter else: @@ -234,7 +229,6 @@ def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_nu "rule_track": sanitize(src_rule['track']['type']['name']), "rule_installon": sanitize(src_rule['install-on'][0]['name']), "rule_time": sanitize(src_rule['time'][0]['name']), - "rule_comment": sanitize(comments), "rule_name": sanitize(rule_name), "rule_uid": sanitize(src_rule['uid']), "rule_implied": False, @@ -246,68 +240,127 @@ def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_nu "parent_rule_uid": sanitize(parent_rule_uid), "last_hit": sanitize(last_hit) } + if comments is not None: + rule['rule_comment'] = sanitize(comments) rulebase.append(rule) + return rule_num + 1 + return rule_num -def 
parse_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1): +def resolve_uid_to_name(nw_obj_uid): + if nw_obj_uid in uid_to_name_map: + return uid_to_name_map[nw_obj_uid] + else: + logger = getFwoLogger() + logger.warning("could not resolve network object with uid " + nw_obj_uid) + return "" + + +def insert_section_header_rule(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): + section_header_uids.append(sanitize(rule_uid)) + rule = { + "control_id": int(import_id), + "rule_num": int(rule_num), + "rulebase_name": sanitize(layer_name), + # rule_ruleid + "rule_disabled": False, + "rule_src_neg": False, + "rule_src": "Any", + "rule_src_refs": sanitize(cp_const.any_obj_uid), + "rule_dst_neg": False, + "rule_dst": "Any", + "rule_dst_refs": sanitize(cp_const.any_obj_uid), + "rule_svc_neg": False, + "rule_svc": "Any", + "rule_svc_refs": sanitize(cp_const.any_obj_uid), + "rule_action": "Accept", + "rule_track": "Log", + "rule_installon": "Policy Targets", + "rule_time": "Any", + "rule_implied": False, + # "rule_comment": None, + # rule_name + "rule_uid": sanitize(rule_uid), + "rule_head_text": sanitize(section_name), + # rule_from_zone + # rule_to_zone + # rule_last_change_admin + "parent_rule_uid": sanitize(parent_uid) + } + rulebase.append(rule) + return rule_num + 1 + + +def add_domain_rule_header_rule(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): + return insert_section_header_rule(rulebase, section_name, layer_name, + import_id, rule_uid, rule_num, section_header_uids, parent_uid) + + +def check_and_add_section_header(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1): + # if current rulebase starts a new section, add section header, but only if it does not exist yet (can happen by chunking a section) + if 'type' in src_rulebase and src_rulebase['type'] == 'access-section' and 'uid' in src_rulebase: # and not src_rulebase['uid'] in section_header_uids: + section_name = default_section_header_text + if 'name' in src_rulebase: + section_name = src_rulebase['name'] + if 'parent_rule_uid' in src_rulebase: + parent_uid = src_rulebase['parent_rule_uid'] + else: + parent_uid = "" + rule_num = insert_section_header_rule(target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) + parent_uid = src_rulebase['uid'] + return rule_num - if (recursion_level > fwo_const.max_recursion_level): - raise ImportRecursionLimitReached( - "parse_rulebase_json") from None +def parse_rulebase(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, + debug_level=0, recursion_level=1, layer_disabled=False): logger = getFwoLogger() - if 'layerchunks' in src_rulebase: + if (recursion_level > fwo_const.max_recursion_level): + raise ImportRecursionLimitReached("parse_rulebase") from None + + # parse chunks + if 'layerchunks' in src_rulebase: # found chunks of layers which need to be parsed separately for chunk in src_rulebase['layerchunks']: if 'rulebase' in chunk: for rules_chunk in chunk['rulebase']: - rule_num = parse_rulebase_json(rules_chunk, target_rulebase, layer_name, import_id, rule_num, - section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) + 
rule_num = parse_rulebase(rules_chunk, target_rulebase, layer_name, import_id, rule_num, + section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) else: - logger.warning("found no rulebase in chunk:\n" + - json.dumps(chunk, indent=2)) - else: - if 'rulebase' in src_rulebase: - # add section header, but only if it does not exist yet (can happen by chunking a section) - if src_rulebase['type'] == 'access-section' and not src_rulebase['uid'] in section_header_uids: - section_name = "section without name" - if 'name' in src_rulebase: - section_name = src_rulebase['name'] - if 'parent_rule_uid' in src_rulebase: - parent_uid = src_rulebase['parent_rule_uid'] - else: - parent_uid = "" - add_section_header_rule_in_json(target_rulebase, section_name, layer_name, - import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) - rule_num += 1 - parent_uid = src_rulebase['uid'] - for rule in src_rulebase['rulebase']: + rule_num = parse_rulebase(chunk, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) + + check_and_add_section_header(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) + + # parse layered rulebase + if 'rulebase' in src_rulebase: + # layer_disabled = not src_rulebase['enabled'] + for rule in src_rulebase['rulebase']: + if 'type' in rule: if rule['type'] == 'place-holder': # add domain rules section_name = "" if 'name' in src_rulebase: section_name = rule['name'] - add_domain_rule_header_rule_in_json( + rule_num = add_domain_rule_header_rule( target_rulebase, section_name, layer_name, import_id, rule['uid'], rule_num, section_header_uids, parent_uid) else: # parse standard sections - parse_single_rule_to_json( + rule_num = parse_single_rule( rule, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import, debug_level=debug_level) - rule_num += 1 - - if src_rulebase['type'] == 'place-holder': # add domain rules - logger.debug('found domain rule ref: ' + src_rulebase['uid']) - section_name = "" - if 'name' in src_rulebase: - section_name = src_rulebase['name'] - add_domain_rule_header_rule_in_json( - target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) - rule_num += 1 - if 'rule-number' in src_rulebase: # rulebase is just a single rule - parse_single_rule_to_json( - src_rulebase, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import) - rule_num += 1 + if 'rulebase' in rule: # alsways check if a rule contains another layer + rule_num = parse_rulebase(rule, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) + + if 'type' in src_rulebase and src_rulebase['type'] == 'place-holder': # add domain rules + logger.debug('found domain rule ref: ' + src_rulebase['uid']) + section_name = "" + if 'name' in src_rulebase: + section_name = src_rulebase['name'] + rule_num = add_domain_rule_header_rule( + target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) + + if 'rule-number' in src_rulebase: # rulebase is just a single rule + rule_num = parse_single_rule(src_rulebase, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import) + 
return rule_num -def parse_nat_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1): +def parse_nat_rulebase(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1): if (recursion_level > fwo_const.max_recursion_level): raise ImportRecursionLimitReached( @@ -318,39 +371,29 @@ def parse_nat_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id for chunk in src_rulebase['nat_rule_chunks']: if 'rulebase' in chunk: for rules_chunk in chunk['rulebase']: - rule_num = parse_nat_rulebase_json(rules_chunk, target_rulebase, layer_name, import_id, rule_num, + rule_num = parse_nat_rulebase(rules_chunk, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) else: logger.warning( "parse_rule: found no rulebase in chunk:\n" + json.dumps(chunk, indent=2)) else: if 'rulebase' in src_rulebase: - # add section header, but only if it does not exist yet (can happen by chunking a section) - if src_rulebase['type'] == 'access-section' and not src_rulebase['uid'] in section_header_uids: - section_name = "" - if 'name' in src_rulebase: - section_name = src_rulebase['name'] - parent_uid = "" - add_section_header_rule_in_json(target_rulebase, section_name, layer_name, - import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) - rule_num += 1 - parent_uid = src_rulebase['uid'] + check_and_add_section_header(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1) + for rule in src_rulebase['rulebase']: (rule_match, rule_xlate) = parse_nat_rule_transform(rule, rule_num) - parse_single_rule_to_json( + rule_num = parse_single_rule( rule_match, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import) - parse_single_rule_to_json( + parse_single_rule( # do not increase rule_num here rule_xlate, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import) - rule_num += 1 - if 'rule-number' in src_rulebase: # rulebase is just a single rule + if 'rule-number' in src_rulebase: # rulebase is just a single rule (xlate rules do not count) (rule_match, rule_xlate) = parse_nat_rule_transform( src_rulebase, rule_num) - parse_single_rule_to_json( + rule_num = parse_single_rule( rule_match, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import) - parse_single_rule_to_json( + parse_single_rule( # do not increase rule_num here (xlate rules do not count) rule_xlate, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import) - rule_num += 1 return rule_num @@ -392,3 +435,4 @@ def parse_nat_rule_transform(xlate_rule_in, rule_num): 'rule_type': 'xlate' } return (rule_match, rule_xlate) + diff --git a/roles/importer/files/importer/checkpointR8x/parse_service.py b/roles/importer/files/importer/checkpointR8x/cp_service.py similarity index 98% rename from roles/importer/files/importer/checkpointR8x/parse_service.py rename to roles/importer/files/importer/checkpointR8x/cp_service.py index 2302c8e8b..294ac93be 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_service.py +++ b/roles/importer/files/importer/checkpointR8x/cp_service.py @@ -121,7 +121,7 @@ def add_member_names_for_svc_group(idx, svc_objects): 
svc_objects.insert(idx, group) -def parse_service_objects_to_json(full_config, config2import, import_id, debug_level=0): +def normalize_service_objects(full_config, config2import, import_id, debug_level=0): svc_objects = [] for svc_table in full_config['object_tables']: collect_svc_objects(svc_table, svc_objects) diff --git a/roles/importer/files/importer/checkpointR8x/parse_user.py b/roles/importer/files/importer/checkpointR8x/cp_user.py similarity index 99% rename from roles/importer/files/importer/checkpointR8x/parse_user.py rename to roles/importer/files/importer/checkpointR8x/cp_user.py index 0551e8ed3..c92fc49a9 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_user.py +++ b/roles/importer/files/importer/checkpointR8x/cp_user.py @@ -52,9 +52,8 @@ def collect_users_from_rulebase(rulebase, users): for rule in rulebase: collect_users_from_rule(rule, users) -# the following is only used within new python-only importer: - +# the following is only used within new python-only importer: def parse_user_objects_from_rulebase(rulebase, users, import_id): collect_users_from_rulebase(rulebase, users) for user_name in users.keys(): diff --git a/roles/importer/files/importer/checkpointR8x/cpcommon.py b/roles/importer/files/importer/checkpointR8x/cpcommon.py deleted file mode 100644 index b04a8c264..000000000 --- a/roles/importer/files/importer/checkpointR8x/cpcommon.py +++ /dev/null @@ -1,344 +0,0 @@ -from distutils.log import debug -import sys -from common import importer_base_dir -from fwo_log import getFwoLogger -sys.path.append(importer_base_dir + '/checkpointR8x') -import json -import time -import getter -import fwo_alert, fwo_api -import ipaddress -import fwo_globals -import cp_const -from cp_const import details_level - - -def validate_ip_address(address): - try: - # ipaddress.ip_address(address) - ipaddress.ip_network(address) - return True - # print("IP address {} is valid. 
The object returned is {}".format(address, ip)) - except ValueError: - return False - # print("IP address {} is not valid".format(address)) - - -def get_ip_of_obj(obj, mgm_id=None): - if 'ipv4-address' in obj: - ip_addr = obj['ipv4-address'] - elif 'ipv6-address' in obj: - ip_addr = obj['ipv6-address'] - elif 'subnet4' in obj: - ip_addr = obj['subnet4'] + '/' + str(obj['mask-length4']) - elif 'subnet6' in obj: - ip_addr = obj['subnet6'] + '/' + str(obj['mask-length6']) - elif 'ipv4-address-first' in obj and 'ipv4-address-last' in obj: - ip_addr = obj['ipv4-address-first'] + '-' + str(obj['ipv4-address-last']) - elif 'ipv6-address-first' in obj and 'ipv6-address-last' in obj: - ip_addr = obj['ipv6-address-first'] + '-' + str(obj['ipv6-address-last']) - else: - ip_addr = None - - ## fix malformed ip addresses (should not regularly occur and constitutes a data issue in CP database) - if ip_addr is None or ('type' in obj and (obj['type'] == 'address-range' or obj['type'] == 'multicast-address-range')): - pass # ignore None and ranges here - elif not validate_ip_address(ip_addr): - alerter = fwo_alert.getFwoAlerter() - alert_description = "object is not a valid ip address (" + str(ip_addr) + ")" - fwo_api.create_data_issue(alerter['fwo_api_base_url'], alerter['jwt'], severity=2, obj_name=obj['name'], object_type=obj['type'], description=alert_description, mgm_id=mgm_id) - alert_description = "object '" + obj['name'] + "' (type=" + obj['type'] + ") is not a valid ip address (" + str(ip_addr) + ")" - fwo_api.setAlert(alerter['fwo_api_base_url'], alerter['jwt'], title="import error", severity=2, role='importer', \ - description=alert_description, source='import', alertCode=17, mgm_id=mgm_id) - ip_addr = '0.0.0.0/32' # setting syntactically correct dummy ip - return ip_addr - -##################### 2nd-level functions ################################### - -def get_basic_config (config_json, mgm_details, force=False, config_filename=None, - limit=150, details_level=cp_const.details_level, test_version='off', debug_level=0, ssl_verification=True, sid=None): - logger = getFwoLogger() - - api_host = mgm_details['hostname'] - api_user = mgm_details['import_credential']['user'] - if mgm_details['domainUid'] != None: - api_domain = mgm_details['domainUid'] - else: - api_domain = mgm_details['configPath'] - api_port = str(mgm_details['port']) - api_password = mgm_details['import_credential']['secret'] - base_url = 'https://' + api_host + ':' + str(api_port) + '/web_api/' - - # top level dict start, sid contains the domain information, so only sending domain during login - if sid is None: # if sid was not passed, login and get it - sid = getter.login(api_user,api_password,api_host,api_port,api_domain,ssl_verification) - v_url = getter.get_api_url (sid, api_host, api_port, api_user, base_url, limit, test_version, ssl_verification, debug_level=debug_level) - - config_json.update({'rulebases': [], 'nat_rulebases': [] }) - - with_hits = True - show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'show-hits' : with_hits} - - # read all rulebases: handle per device details - for device in mgm_details['devices']: - if device['global_rulebase_name'] != None and device['global_rulebase_name']!='': - show_params_rules['name'] = device['global_rulebase_name'] - # get global layer rulebase - logger.debug ( "getting layer: " + show_params_rules['name'] ) - current_layer_json = getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, 
layername=device['global_rulebase_name']) - if current_layer_json is None: - return 1 - # now also get domain rules - show_params_rules['name'] = device['local_rulebase_name'] - current_layer_json['layername'] = device['local_rulebase_name'] - logger.debug ( "getting domain rule layer: " + show_params_rules['name'] ) - domain_rules = getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name']) - if current_layer_json is None: - return 1 - - # now handling possible reference to domain rules within global rules - # if we find the reference, replace it with the domain rules - if 'layerchunks' in current_layer_json: - for chunk in current_layer_json["layerchunks"]: - for rule in chunk['rulebase']: - if "type" in rule and rule["type"] == "place-holder": - logger.debug ("found domain rules place-holder: " + str(rule) + "\n\n") - current_layer_json = getter.insert_layer_after_place_holder(current_layer_json, domain_rules, rule['uid']) - else: # no global rules, just get local ones - show_params_rules['name'] = device['local_rulebase_name'] - logger.debug ( "getting layer: " + show_params_rules['name'] ) - current_layer_json = getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name']) - if current_layer_json is None: - return 1 - - config_json['rulebases'].append(current_layer_json) - - # getting NAT rules - need package name for nat rule retrieval - # todo: each gateway/layer should have its own package name (pass management details instead of single data?) - if device['package_name'] != None and device['package_name'] != '': - show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'package': device['package_name'] } - if debug_level>3: - logger.debug ( "getting nat rules for package: " + device['package_name'] ) - nat_rules = getter.get_nat_rules_from_api_as_dict (api_host, api_port, v_url, sid, show_params_rules) - if len(nat_rules)>0: - config_json['nat_rulebases'].append(nat_rules) - else: - config_json['nat_rulebases'].append({ "nat_rule_chunks": [] }) - else: # always making sure we have an (even empty) nat rulebase per device - config_json['nat_rulebases'].append({ "nat_rule_chunks": [] }) - - # leaving rules, moving on to objects - config_json["object_tables"] = [] - show_params_objs = {'limit':limit,'details-level': cp_const.details_level} - - for obj_type in cp_const.api_obj_types: - object_table = { "object_type": obj_type, "object_chunks": [] } - current=0 - total=current+1 - show_cmd = 'show-' + obj_type - if debug_level>5: - logger.debug ( "obj_type: "+ obj_type ) - while (current5: - logger.debug ( obj_type +" current:"+ str(current) + " of a total " + str(total) ) - else : - current = total - if debug_level>5: - logger.debug ( obj_type +" total:"+ str(total) ) - config_json["object_tables"].append(object_table) - logout_result = getter.cp_api_call(v_url, 'logout', {}, sid) - - # only write config to file if config_filename is given - if config_filename != None and len(config_filename)>1: - with open(config_filename, "w") as configfile_json: - configfile_json.write(json.dumps(config_json)) - return 0 - - -################# enrich ####################### -def enrich_config (config, mgm_details, limit=150, details_level=cp_const.details_level, noapi=False, sid=None): - - logger = getFwoLogger() - base_url = 'https://' + mgm_details['hostname'] + ':' + str(mgm_details['port']) + '/web_api/' - nw_objs_from_obj_tables = 
[] - svc_objs_from_obj_tables = [] - starttime = int(time.time()) - - # do nothing for empty configs - if config == {}: - return 0 - - ################################################################################# - # adding inline and domain layers - found_new_inline_layers = True - old_inline_layers = [] - while found_new_inline_layers: - # sweep existing rules for inline layer links - inline_layers = [] - for rulebase in config['rulebases'] + config['nat_rulebases']: - getter.get_inline_layer_names_from_rulebase(rulebase, inline_layers) - - if len(inline_layers) == len(old_inline_layers): - found_new_inline_layers = False - else: - old_inline_layers = inline_layers - for layer in inline_layers: - if fwo_globals.debug_level>5: - logger.debug ( "found inline layer " + layer ) - # enrich config --> get additional layers referenced in top level layers by name - # also handle possible recursion (inline layer containing inline layer(s)) - # get layer rules from api - # add layer rules to config - - # next phase: how to logically link layer guard with rules in layer? --> AND of src, dst & svc between layer guard and each rule in layer? - - ################################################################################# - # get object data which is only contained as uid in config by making additional api calls - # get all object uids (together with type) from all rules in fields src, dst, svc - nw_uids_from_rulebase = [] - svc_uids_from_rulebase = [] - - for rulebase in config['rulebases'] + config['nat_rulebases']: - if fwo_globals.debug_level>5: - if 'layername' in rulebase: - logger.debug ( "Searching for all uids in rulebase: " + rulebase['layername'] ) - getter.collect_uids_from_rulebase(rulebase, nw_uids_from_rulebase, svc_uids_from_rulebase, "top_level") - - # remove duplicates from uid lists - nw_uids_from_rulebase = list(set(nw_uids_from_rulebase)) - svc_uids_from_rulebase = list(set(svc_uids_from_rulebase)) - - # get all uids in objects tables - for obj_table in config['object_tables']: - nw_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, cp_const.nw_obj_table_names)) - svc_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, cp_const.svc_obj_table_names)) - - # identify all objects (by type) that are missing in objects tables but present in rulebase - missing_nw_object_uids = getter.get_broken_object_uids(nw_objs_from_obj_tables, nw_uids_from_rulebase) - missing_svc_object_uids = getter.get_broken_object_uids(svc_objs_from_obj_tables, svc_uids_from_rulebase) - - # adding the uid of the Original object for natting: - missing_nw_object_uids.append(cp_const.original_obj_uid) - missing_svc_object_uids.append(cp_const.original_obj_uid) - - if fwo_globals.debug_level>4: - logger.debug ( "found missing nw objects: '" + ",".join(missing_nw_object_uids) + "'" ) - logger.debug ( "found missing svc objects: '" + ",".join(missing_svc_object_uids) + "'" ) - - if noapi == False: - # if sid is None: - # TODO: why is the re-genereation of a new sid necessary here? 
- - if mgm_details['domainUid'] != None: - api_domain = mgm_details['domainUid'] - else: - api_domain = mgm_details['configPath'] - - sid = getter.login(mgm_details['import_credential']['user'],mgm_details['import_credential']['secret'],mgm_details['hostname'],mgm_details['port'],api_domain) - logger.debug ( "re-logged into api" ) - - # if an object is not there: - # make api call: show object details-level full uid "" and add object to respective json - for missing_obj in missing_nw_object_uids: - show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj} - logger.debug ( "fetching obj with uid: " + missing_obj) - obj = getter.cp_api_call(base_url, 'show-object', show_params_host, sid) - if 'object' in obj: - obj = obj['object'] - if (obj['type'] == 'CpmiAnyObject'): - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': 'any nw object checkpoint (hard coded)', - 'type': 'CpmiAnyObject', 'ipv4-address': '0.0.0.0/0', - } ] } ] } - config['object_tables'].append(json_obj) - elif (obj['type'] == 'simple-gateway' or obj['type'] == 'CpmiGatewayPlain' or obj['type'] == 'interop'): - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': get_ip_of_obj(obj), - } ] } ] } - config['object_tables'].append(json_obj) - elif obj['type'] == 'multicast-address-range': - logger.debug("found multicast-address-range: " + obj['name'] + " (uid:" + obj['uid']+ ")") - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': get_ip_of_obj(obj), - } ] } ] } - config['object_tables'].append(json_obj) - elif (obj['type'] == 'CpmiVsClusterMember' or obj['type'] == 'CpmiVsxClusterMember'): - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': get_ip_of_obj(obj), - } ] } ] } - config['object_tables'].append(json_obj) - logger.debug ('missing obj: ' + obj['name'] + obj['type']) - elif (obj['type'] == 'Global'): - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': '0.0.0.0/0', - } ] } ] } - config['object_tables'].append(json_obj) - logger.debug ('missing obj: ' + obj['name'] + obj['type']) - elif (obj['type'] == 'updatable-object'): - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'group' #, 'ipv4-address': '0.0.0.0/0', - } ] } ] } - config['object_tables'].append(json_obj) - logger.debug ('missing obj: ' + obj['name'] + obj['type']) - elif (obj['type'] == 'Internet'): - json_obj = {"object_type": "hosts", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'network', 'ipv4-address': '0.0.0.0/0', - } ] } ] } - config['object_tables'].append(json_obj) - elif (obj['type'] == 'access-role'): - pass # ignorning user objects - else: - logger.warning ( "missing nw obj of unexpected type '" + obj['type'] + "': 
" + missing_obj ) - logger.debug ( "missing nw obj: " + missing_obj + " added" ) - else: - logger.warning("could not get the missing object with uid=" + missing_obj + " from CP API") - - for missing_obj in missing_svc_object_uids: - show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj} - obj = getter.cp_api_call(base_url, 'show-object', show_params_host, sid) - obj = obj['object'] - if (obj['type'] == 'CpmiAnyObject'): - json_obj = {"object_type": "services-other", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': 'any svc object checkpoint (hard coded)', - 'type': 'service-other', 'ip-protocol': '0' - } ] } ] } - config['object_tables'].append(json_obj) - elif (obj['type'] == 'Global'): - json_obj = {"object_type": "services-other", "object_chunks": [ { - "objects": [ { - 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': 'Original svc object checkpoint (hard coded)', - 'type': 'service-other', 'ip-protocol': '0' - } ] } ] } - config['object_tables'].append(json_obj) - else: - logger.warning ( "missing svc obj (uid=" + missing_obj + ") of unexpected type \"" + obj['type'] +"\"" ) - logger.debug ( "missing svc obj: " + missing_obj + " added") - - logout_result = getter.cp_api_call(base_url, 'logout', {}, sid) - - logger.debug ( "checkpointR8x/enrich_config - duration: " + str(int(time.time()) - starttime) + "s" ) - - return 0 diff --git a/roles/importer/files/importer/checkpointR8x/fwcommon.py b/roles/importer/files/importer/checkpointR8x/fwcommon.py index 66d49b24a..ac6a80db3 100644 --- a/roles/importer/files/importer/checkpointR8x/fwcommon.py +++ b/roles/importer/files/importer/checkpointR8x/fwcommon.py @@ -1,28 +1,28 @@ -from distutils.log import debug import sys +import json +import copy from common import importer_base_dir from fwo_log import getFwoLogger sys.path.append(importer_base_dir + '/checkpointR8x') -import copy, time -import cp_const, parse_network, parse_rule, parse_service, parse_user -import getter -from cpcommon import get_basic_config, enrich_config +import time import fwo_globals -from fwo_exception import FwLoginFailed +import cp_rule +import cp_const, cp_network, cp_service +import cp_getter +from cp_enrich import enrich_config +from fwo_exception import FwLoginFailed, FwLogoutFailed +from cp_user import parse_user_objects_from_rulebase def has_config_changed (full_config, mgm_details, force=False): if full_config != {}: # a native config was passed in, so we assume that an import has to be done (simulating changes here) return 1 - # from 5.8 onwards: preferably use domain uid instead of domain name due to CP R81 bug with certain installations - if mgm_details['domainUid'] != None: - domain = mgm_details['domainUid'] - else: - domain = mgm_details['configPath'] + + domain, _ = prepare_get_vars(mgm_details) try: # top level dict start, sid contains the domain information, so only sending domain during login - session_id = getter.login(mgm_details['import_credential']['user'], mgm_details['import_credential']['secret'], mgm_details['hostname'], str(mgm_details['port']), domain) + session_id = login_cp(mgm_details, domain) except: raise FwLoginFailed # maybe 2Temporary failure in name resolution" @@ -34,10 +34,15 @@ def has_config_changed (full_config, mgm_details, force=False): if last_change_time==None or last_change_time=='' or force: # if no last import time found or given or if force flag is set, do full import - return 1 - else: - # otherwise search 
for any changes since last import
-        return (getter.get_changes(session_id, mgm_details['hostname'], str(mgm_details['port']),last_change_time) != 0)
+        result = 1
+    else: # otherwise search for any changes since last import
+        result = (cp_getter.get_changes(session_id, mgm_details['hostname'], str(mgm_details['port']),last_change_time) != 0)
+
+    try: # log out of the API session that was only used for change detection
+        logout_result = cp_getter.cp_api_call("https://" + mgm_details['hostname'] + ":" + str(mgm_details['port']) + "/web_api/", 'logout', {}, session_id)
+    except:
+        raise FwLogoutFailed # maybe temporary failure in name resolution
+    return result
 def get_config(config2import, full_config, current_import_id, mgm_details, limit=150, force=False, jwt=None):
@@ -50,18 +55,20 @@ def get_config(config2import, full_config, current_import_id, mgm_details, limit
     if not parsing_config_only: # get config from cp fw mgr
         starttime = int(time.time())
-        # from 5.8 onwards: preferably use domain uid instead of domain name due to CP R81 bug with certain installations
-        if mgm_details['domainUid'] != None:
-            domain = mgm_details['domainUid']
-        else:
-            domain = mgm_details['configPath']
+        if 'users' not in full_config:
+            full_config.update({'users': {}})
+
+        domain, base_url = prepare_get_vars(mgm_details)
-        sid = getter.login(mgm_details['import_credential']['user'], mgm_details['import_credential']['secret'], mgm_details['hostname'], str(mgm_details['port']), domain)
+        sid = login_cp(mgm_details, domain)
-        result_get_basic_config = get_basic_config (full_config, mgm_details, force=force, sid=sid, limit=str(limit), details_level=cp_const.details_level, test_version='off')
+        result_get_rules = get_rules (full_config, mgm_details, base_url, sid, force=force, limit=str(limit), details_level=cp_const.details_level, test_version='off')
+        if result_get_rules>0:
+            return result_get_rules
-        if result_get_basic_config>0:
-            return result_get_basic_config
+        result_get_objects = get_objects (full_config, mgm_details, base_url, sid, force=force, limit=str(limit), details_level=cp_const.details_level, test_version='off')
+        if result_get_objects>0:
+            return result_get_objects
         result_enrich_config = enrich_config (full_config, mgm_details, limit=str(limit), details_level=cp_const.details_level, sid=sid)
@@ -71,47 +78,150 @@ def get_config(config2import, full_config, current_import_id, mgm_details, limit
         duration = int(time.time()) - starttime
         logger.debug ( "checkpointR8x/get_config - duration: " + str(duration) + "s" )
-    if full_config == {}: # no changes
-        return 0
+    cp_network.normalize_network_objects(full_config, config2import, current_import_id, mgm_id=mgm_details['id'])
+    cp_service.normalize_service_objects(full_config, config2import, current_import_id)
+    parse_users_from_rulebases(full_config, full_config['rulebases'], full_config['users'], config2import, current_import_id)
+    config2import.update({'rules': cp_rule.normalize_rulebases_top_level(full_config, current_import_id, config2import) })
+    if not parsing_config_only: # we logged in to the CP manager above, so log out again
+        try: # logout
+            logout_result = cp_getter.cp_api_call("https://" + mgm_details['hostname'] + ":" + str(mgm_details['port']) + "/web_api/", 'logout', {}, sid)
+        except:
+            raise FwLogoutFailed # maybe temporary failure in name resolution
+    return 0
+
+
+def prepare_get_vars(mgm_details):
+
+    # from 5.8 onwards: preferably use domain uid instead of domain name due to CP R81 bug with certain installations
+    if
mgm_details['domainUid'] != None: + domain = mgm_details['domainUid'] else: - parse_network.parse_network_objects_to_json(full_config, config2import, current_import_id, mgm_id=mgm_details['id']) - parse_service.parse_service_objects_to_json(full_config, config2import, current_import_id) - if 'users' not in full_config: - full_config.update({'users': {}}) - target_rulebase = [] - rule_num = 0 - parent_uid="" - section_header_uids=[] - rb_range = range(len(full_config['rulebases'])) - for rb_id in rb_range: - parse_user.parse_user_objects_from_rulebase( - full_config['rulebases'][rb_id], full_config['users'], current_import_id) - # if current_layer_name == args.rulebase: - if fwo_globals.debug_level>3: - logger.debug("parsing layer " + full_config['rulebases'][rb_id]['layername']) - - # parse access rules - rule_num = parse_rule.parse_rulebase_json( - full_config['rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'], - current_import_id, rule_num, section_header_uids, parent_uid, config2import) - # now parse the nat rulebase - - # parse nat rules - if len(full_config['nat_rulebases'])>0: - if len(full_config['nat_rulebases']) != len(rb_range): - logger.warning('get_config - found ' + str(len(full_config['nat_rulebases'])) + - ' nat rulebases and ' + str(len(rb_range)) + ' access rulebases') - else: - rule_num = parse_rule.parse_nat_rulebase_json( - full_config['nat_rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'], - current_import_id, rule_num, section_header_uids, parent_uid, config2import) - config2import.update({'rules': target_rulebase}) - - # copy users from full_config to config2import - # also converting users from dict to array: - config2import.update({'user_objects': []}) - for user_name in full_config['users'].keys(): - user = copy.deepcopy(full_config['users'][user_name]) - user.update({'user_name': user_name}) - config2import['user_objects'].append(user) + domain = mgm_details['configPath'] + api_host = mgm_details['hostname'] + api_user = mgm_details['import_credential']['user'] + if mgm_details['domainUid'] != None: + api_domain = mgm_details['domainUid'] + else: + api_domain = mgm_details['configPath'] + api_port = str(mgm_details['port']) + api_password = mgm_details['import_credential']['secret'] + base_url = 'https://' + api_host + ':' + str(api_port) + '/web_api/' + + return domain, base_url + + +def login_cp(mgm_details, domain, ssl_verification=True): + return cp_getter.login(mgm_details['import_credential']['user'], mgm_details['import_credential']['secret'], mgm_details['hostname'], str(mgm_details['port']), domain) + + +def get_rules (config_json, mgm_details, v_url, sid, force=False, config_filename=None, + limit=150, details_level=cp_const.details_level, test_version='off', debug_level=0, ssl_verification=True): + + logger = getFwoLogger() + config_json.update({'rulebases': [], 'nat_rulebases': [] }) + with_hits = True + show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'show-hits' : with_hits} + + # read all rulebases: handle per device details + for device in mgm_details['devices']: + if device['global_rulebase_name'] != None and device['global_rulebase_name']!='': + show_params_rules['name'] = device['global_rulebase_name'] + # get global layer rulebase + logger.debug ( "getting layer: " + show_params_rules['name'] ) + current_layer_json = cp_getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, 
layername=device['global_rulebase_name']) + if current_layer_json is None: + return 1 + # now also get domain rules + show_params_rules['name'] = device['local_rulebase_name'] + current_layer_json['layername'] = device['local_rulebase_name'] + logger.debug ( "getting domain rule layer: " + show_params_rules['name'] ) + domain_rules = cp_getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name']) + if current_layer_json is None: + return 1 + + # now handling possible reference to domain rules within global rules + # if we find the reference, replace it with the domain rules + if 'layerchunks' in current_layer_json: + for chunk in current_layer_json["layerchunks"]: + for rule in chunk['rulebase']: + if "type" in rule and rule["type"] == "place-holder": + logger.debug ("found domain rules place-holder: " + str(rule) + "\n\n") + current_layer_json = cp_getter.insert_layer_after_place_holder(current_layer_json, domain_rules, rule['uid']) + else: # no global rules, just get local ones + show_params_rules['name'] = device['local_rulebase_name'] + logger.debug ( "getting layer: " + show_params_rules['name'] ) + current_layer_json = cp_getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name']) + if current_layer_json is None: + return 1 + + config_json['rulebases'].append(current_layer_json) + + # getting NAT rules - need package name for nat rule retrieval + # todo: each gateway/layer should have its own package name (pass management details instead of single data?) + if device['package_name'] != None and device['package_name'] != '': + show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'package': device['package_name'] } + if debug_level>3: + logger.debug ( "getting nat rules for package: " + device['package_name'] ) + nat_rules = cp_getter.get_nat_rules_from_api_as_dict (v_url, sid, show_params_rules) + if len(nat_rules)>0: + config_json['nat_rulebases'].append(nat_rules) + else: + config_json['nat_rulebases'].append({ "nat_rule_chunks": [] }) + else: # always making sure we have an (even empty) nat rulebase per device + config_json['nat_rulebases'].append({ "nat_rule_chunks": [] }) + return 0 + + +def get_objects(config_json, mgm_details, v_url, sid, force=False, config_filename=None, + limit=150, details_level=cp_const.details_level, test_version='off', debug_level=0, ssl_verification=True): + + logger = getFwoLogger() + + config_json["object_tables"] = [] + show_params_objs = {'limit':limit,'details-level': cp_const.details_level} + + for obj_type in cp_const.api_obj_types: + object_table = { "object_type": obj_type, "object_chunks": [] } + current=0 + total=current+1 + show_cmd = 'show-' + obj_type + if debug_level>5: + logger.debug ( "obj_type: "+ obj_type ) + while (current5: + logger.debug ( obj_type +" current:"+ str(current) + " of a total " + str(total) ) + else : + current = total + if debug_level>5: + logger.debug ( obj_type +" total:"+ str(total) ) + config_json["object_tables"].append(object_table) + # logout_result = cp_getter.cp_api_call(v_url, 'logout', {}, sid) + + # only write config to file if config_filename is given + if config_filename != None and len(config_filename)>1: + with open(config_filename, "w") as configfile_json: + configfile_json.write(json.dumps(config_json)) return 0 + + +def parse_users_from_rulebases (full_config, rulebase, users, config2import, current_import_id): + if 'users' not in 
full_config: + full_config.update({'users': {}}) + + rb_range = range(len(full_config['rulebases'])) + for rb_id in rb_range: + parse_user_objects_from_rulebase (full_config['rulebases'][rb_id], full_config['users'], current_import_id) + + # copy users from full_config to config2import + # also converting users from dict to array: + config2import.update({'user_objects': []}) + for user_name in full_config['users'].keys(): + user = copy.deepcopy(full_config['users'][user_name]) + user.update({'user_name': user_name}) + config2import['user_objects'].append(user) diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_api-test-call.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_api-test-call.py deleted file mode 100755 index a9253a98c..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_api-test-call.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/python3 -import logging, logging.config -import json, argparse -import sys -from common import importer_base_dir, set_ssl_verification -sys.path.append(importer_base_dir) -import getter - -logging.config.fileConfig(fname='discovery_logging.conf', disable_existing_loggers=False) - -logger = logging.getLogger(__name__) - -logger.info("START") -parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls') -parser.add_argument('-a', '--hostname', metavar='api_host', required=True, help='Check Point R8x management server') -parser.add_argument('-w', '--password', metavar='api_password', required=True, help='password for management server') -parser.add_argument('-m', '--mode', metavar='mode', required=True, help='[domains|packages|layers|generic]') -parser.add_argument('-c', '--command', metavar='command', required=False, help='generic command to send to the api (needs -m generic). ' + - 'Please note that the command must be written as one word (e.g. 
show-access-layer instead of show acess-layers).') -parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch') -parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443') -parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment') -parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off') -parser.add_argument('-l', '--level', metavar='level_of_detail', default='standard', help='[standard|full]') -parser.add_argument('-i', '--limit', metavar='api_limit', default='150', help='The maximal number of returned results per HTTPS Connection; default=150') -parser.add_argument('-n', '--nolimit', metavar='nolimit', default='off', help='[on|off] Set to on if (generic) command does not understand limit switch') -parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') -parser.add_argument('-V', '--version', metavar='api_version', default='off', help='alternate API version [off|]; default=off') - -args = parser.parse_args() -if len(sys.argv)==1: - parser.print_help(sys.stderr) - sys.exit(1) - -domain = args.domain - -if args.mode == 'packages': - api_command='show-packages' - api_details_level="standard" -elif args.mode == 'domains' or args.mode == 'devices': - api_command='show-domains' - api_details_level="standard" - domain = '' -elif args.mode == 'layers': - api_command='show-access-layers' - api_details_level="standard" -elif args.mode == 'generic': - api_command=args.command - api_details_level=args.level -else: - sys.exit("\"" + args.mode +"\" - unknown mode") - -offset = 0 -use_object_dictionary = 'false' -base_url = 'https://' + args.hostname + ':' + args.port + '/web_api/' -ssl_verification = set_ssl_verification(args.ssl) -logger = logging.getLogger(__name__) - -xsid = getter.login(args.user, args.password, args.hostname, args.port, domain, ssl_verification) -api_versions = getter.cp_api_call(args.hostname, args.port, base_url, 'show-api-versions', {}, xsid, ssl_verification=ssl_verification) - -api_version = api_versions["current-version"] -api_supported = api_versions["supported-versions"] -v_url = getter.set_api_url(base_url,args.version,api_supported,args.hostname) -if args.version != 'off': - api_version = args.version -logger.debug ("using current version: "+ api_version ) -logger.debug ("supported versions: "+ ', '.join(api_supported) ) -logger.debug ("limit:"+ args.limit ) -logger.debug ("Domain:"+ args.domain ) -logger.debug ("login:"+ args.user ) -logger.debug ("sid:"+ xsid ) - -payload = { "details-level" : api_details_level } -if args.nolimit == 'off': - payload.update( { "limit" : args.limit, "offset" : offset } ) - -if args.mode == 'generic': # need to divide command string into command and payload (i.e. 
parameters) - cmd_parts = api_command.split(" ") - api_command = cmd_parts[0] - idx = 1 - if len(cmd_parts)>1: - payload.pop('limit') - payload.pop('offset') - while idx < len(cmd_parts): - payload.update({cmd_parts[idx]: cmd_parts[idx+1]}) - idx += 2 - -result = getter.cp_api_call(args.hostname, args.port, v_url, api_command, payload, xsid, ssl_verification=ssl_verification) - -if args.debug == "1" or args.debug == "3": - print ("\ndump of result:\n" + json.dumps(result, indent=4)) -if args.mode == 'packages': - print ("\nthe following packages exist on management server:") - for p in result['packages']: - print (" package: " + p['name']) - if "access-layers" in result: - print ("the following layers exist on management server:") - for p in result['packages']: - print (" package: " + p['name']) - for l in p['access-layers']: - print (" layer: " + l['name']) - -if args.mode == 'domains': - print ("\nthe following domains exist on management server:") - for d in result['objects']: - print (" domain: " + d['name'] + ", uid: " + d['uid']) -if args.mode == 'layers': - print ("\nthe following access-layers exist on management server:") - for l in result['access-layers']: - print (" access-layer: " + l['name'] + ", uid: " + l['uid'] ) -if args.mode == 'generic': - print (json.dumps(result, indent=3)) - -logout_result = getter.cp_api_call(args.hostname, args.port, v_url, 'logout', {}, xsid, ssl_verification=ssl_verification) diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_auto-discover.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_auto-discover.py deleted file mode 100755 index 6c2e043dd..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_auto-discover.py +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/python3 -import sys -# from .. 
common import importer_base_dir -sys.path.append('..') -import logging, logging.config -import getter -import json, argparse, sys -import fwo_log -logging.config.fileConfig(fname='discovery_logging.conf', disable_existing_loggers=False) - -logger = logging.getLogger(__name__) - -logger.info("START") -parser = argparse.ArgumentParser(description='Discover all devices, policies starting from a single server (MDS or stand-alone) from Check Point R8x management via API calls') -parser.add_argument('-a', '--hostname', metavar='api_host', required=True, help='Check Point R8x management server') -parser.add_argument('-w', '--password_file', metavar='api_password_file', required=True, help='name of file containing the password for API of the management server') -parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch') -parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443') -parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off') -parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') -parser.add_argument('-V', '--version', metavar='api_version', default='off', help='alternate API version [off|]; default=off') -parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment') -parser.add_argument('-f', '--format', metavar='output_format', default='table', help='[json|table]]') - -args = parser.parse_args() -if len(sys.argv)==1: - parser.print_help(sys.stderr) - sys.exit(1) - -offset = 0 -use_object_dictionary = 'false' -base_url = 'https://' + args.hostname + ':' + args.port + '/web_api/' -ssl_verification = fwo_log.set_ssl_verification(args.ssl, debug_level=args.debug) - -with open(args.password_file, 'r') as file: - apiuser_pwd = file.read().replace('\n', '') - -xsid = getter.login(args.user, apiuser_pwd, args.hostname, args.port, args.domain, ssl_verification=ssl_verification, debug=args.debug) - -api_versions = getter.cp_api_call(base_url, 'show-api-versions', {}, xsid, ssl_verification=ssl_verification) -api_version = api_versions["current-version"] -api_supported = api_versions["supported-versions"] -v_url = getter.set_api_url(base_url,args.version,api_supported,args.hostname) - -v_url = 'https://' + args.hostname + ':' + args.port + '/web_api/' -if args.version != "off": - v_url += 'v' + args.version + '/' - -logger = logging.getLogger(__name__) - -xsid = getter.login(args.user, apiuser_pwd, args.hostname, args.port, '', ssl_verification=ssl_verification) - -if args.debug == "1" or args.debug == "3": - debug = True -else: - debug = False - -# todo: only show active devices (optionally with a switch) -domains = getter.cp_api_call (v_url, 'show-domains', {}, xsid, ssl_verification=ssl_verification) -gw_types = ['simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiGatewayPlain', 'CpmiGatewayCluster', 'CpmiVsxClusterNetobj'] -parameters = { "details-level" : "full" } - -if domains['total']== 0: - logging.debug ("no domains found, adding dummy domain.") - domains['objects'].append ({ "name": "", "uid": "" }) - - # fetching gateways for non-MDS management: - obj = domains['objects'][0] - obj['gateways'] = getter.cp_api_call(v_url, 
'show-gateways-and-servers', parameters, xsid, ssl_verification=ssl_verification) - - if 'objects' in obj['gateways']: - for gw in obj['gateways']['objects']: - if 'type' in gw and gw['type'] in gw_types and 'policy' in gw: - if 'access-policy-installed' in gw['policy'] and gw['policy']['access-policy-installed'] and "access-policy-name" in gw['policy']: - logging.debug ("standalone mgmt: found gateway " + gw['name'] + " with policy" + gw['policy']['access-policy-name']) - gw['package'] = getter.cp_api_call(v_url, - "show-package", - { "name" : gw['policy']['access-policy-name'], "details-level": "full" }, - xsid, ssl_verification) - else: - logging.warning ("Standalone WARNING: did not find any gateways in stand-alone management") - logout_result = getter.cp_api_call(v_url, 'logout', {}, xsid, ssl_verification=ssl_verification) - -else: # visit each domain and fetch layers - for obj in domains['objects']: - domain_name = obj['name'] - logging.debug ("MDS: searchig in domain " + domain_name) - xsid = getter.login(args.user, apiuser_pwd, args.hostname, args.port, domain_name, ssl_verification=ssl_verification) - obj['gateways'] = getter.cp_api_call(v_url, 'show-gateways-and-servers', parameters, xsid, ssl_verification) - if 'objects' in obj['gateways']: - for gw in obj['gateways']['objects']: - if 'type' in gw and gw['type'] in gw_types and 'policy' in gw: - if 'access-policy-installed' in gw['policy'] and gw['policy']['access-policy-installed'] and "access-policy-name" in gw['policy']: - api_call_str = "show-package name " + gw['policy']['access-policy-name'] + ", logged in to domain " + domain_name - logging.debug ("MDS: found gateway " + gw['name'] + " with policy: " + gw['policy']['access-policy-name']) - logging.debug ("api call: " + api_call_str) - try: - tmp_pkg_name = getter.cp_api_call(v_url, 'show-package', { "name" : gw['policy']['access-policy-name'], "details-level": "full" }, - xsid, ssl_verification=ssl_verification) - except: - tmp_pkg_name = "ERROR while trying to get package " + gw['policy']['access-policy-name'] - gw['package'] = tmp_pkg_name - else: - logging.warning ("Domain-WARNING: did not find any gateways in domain " + obj['name']) - logout_result = getter.cp_api_call(v_url, 'logout', {}, xsid, ssl_verification=ssl_verification) - -# now collect only relevant data and copy to new dict -domains_essential = [] -for obj in domains['objects']: - domain = { 'name': obj['name'], 'uid': obj['uid'] } - gateways = [] - domain['gateways'] = gateways - if 'objects' in obj['gateways']: - for gw in obj['gateways']['objects']: - if 'policy' in gw and 'access-policy-name' in gw['policy']: - gateway = { "name": gw['name'], "uid": gw['uid'], "access-policy-name": gw['policy']['access-policy-name'] } - layers = [] - if 'package' in gw: - if 'access-layers' in gw['package']: - found_domain_layer = False - for ly in gw['package']['access-layers']: - if 'firewall' in ly and ly['firewall']: - if 'parent-layer' in ly: - found_domain_layer = True - for ly in gw['package']['access-layers']: - if 'firewall' in ly and ly['firewall']: - if 'parent-layer' in ly: - layer = { "name": ly['name'], "uid": ly['uid'], "type": "domain-layer", "parent-layer": ly['parent-layer'] } - elif domains['total']==0: - layer = { "name": ly['name'], "uid": ly['uid'], "type": "local-layer" } - elif found_domain_layer: - layer = { "name": ly['name'], "uid": ly['uid'], "type": "global-layer" } - else: # in domain context, but no global layer exists - layer = { "name": ly['name'], "uid": ly['uid'], "type": 
"stand-alone-layer" } - layers.append(layer) - gateway['layers'] = layers - gateways.append(gateway) - domain['gateways'] = gateways - domains_essential.append(domain) -devices = {"domains": domains_essential } - - -##### output ######## -if args.format == 'json': - print (json.dumps(devices, indent=3)) - -elif args.format == 'table': - # compact print in FWO UI input format - colsize_number = 35 - colsize = "{:"+str(colsize_number)+"}" - table = "" - heading_list = ["Domain/Management", "Gateway", "Policy String"] - - # add table header: - for heading in heading_list: - table += colsize.format(heading) - table += "\n" - x = 0 - while x < len(heading_list) * colsize_number: - table += '-' - x += 1 - table += "\n" - - # print one gateway/policy per line - for dom in devices['domains']: - if 'gateways' in dom: - for gw in dom['gateways']: - table += colsize.format(dom["name"]) - table += colsize.format(gw['name']) - if 'layers' in gw: - found_domain_layer = False - layer_string = '' - for ly in gw['layers']: - if 'parent-layer' in ly: - found_domain_layer = True - for ly in gw['layers']: - if ly['type'] == 'stand-alone-layer' or ly['type'] == 'local-layer': - layer_string = ly["name"] - elif found_domain_layer and ly['type'] == 'domain-layer': - domain_layer = ly['name'] - elif found_domain_layer and ly['type'] == 'global-layer': - global_layer = ly['name'] - else: - logging.warning ("found unknown layer type") - if found_domain_layer: - layer_string = global_layer + '/' + domain_layer - table += colsize.format(layer_string) - table += "\n" - else: - table += colsize.format(dom["name"]) - table += "\n" # empty line between domains for readability - - print (table) - -else: - logging.error("You specified a wrong output format: " + args.format ) - parser.print_help(sys.stderr) - sys.exit(1) diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_enrich_config.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_enrich_config.py deleted file mode 100755 index 2db73c357..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_enrich_config.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/python3 -import argparse, time -import json -import sys, os -import cp_const - -from common import importer_base_dir, set_ssl_verification -sys.path.append(importer_base_dir) -sys.path.append(importer_base_dir + "/checkpointR8x") -from fwo_log import getFwoLogger -from cpcommon import enrich_config - - -parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls') -parser.add_argument('-a', '--apihost', metavar='api_host', required=True, help='Check Point R8x management server') -parser.add_argument('-w', '--password', metavar='api_password_file', default='import_user_secret', help='name of the file to read the password for management server from') -parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch') -parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443') -parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Envireonment') -parser.add_argument('-l', '--layer', metavar='policy_layer_name(s)', required=True, help='name of policy layer(s) to read (comma separated)') -parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not 
set, ssl check is off"; default=empty/off') -parser.add_argument('-i', '--limit', metavar='api_limit', default='150', help='The maximal number of returned results per HTTPS Connection; default=150') -parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') -parser.add_argument('-k', '--package', metavar='package_name', help='name of the package for a gateway - necessary for getting NAT rules') -parser.add_argument('-c', '--configfile', metavar='config_file', required=True, help='filename to read and write config in json format from/to') -parser.add_argument('-n', '--noapi', metavar='mode', default='false', help='if set to true (only in combination with mode=enrich), no api connections are made. Useful for testing only.') - -args = parser.parse_args() -if len(sys.argv)==1: - parser.print_help(sys.stderr) - sys.exit(1) - -with open(args.password, "r") as password_file: - api_password = password_file.read().rstrip() - -debug_level = int(args.debug) -logger = getFwoLogger() -config = {} -starttime = int(time.time()) - -# possible todo: get mgmt_details via API just from mgmt_name and dev_name? -mgm_details = { - 'hostname': args.apihost, - 'port': args.port, - 'user': args.user, - 'secret': api_password, - 'configPath': args.domain, - 'devices': [ - { - 'local_rulebase_name': args.layer, - 'global_rulebase_name': None, - 'package_name': args.package - } - ] -} - -result = enrich_config (config, mgm_details, noapi=False, limit=args.limit, details_level=cp_const.details_level) - -duration = int(time.time()) - starttime -logger.debug ( "checkpointR8x/enrich_config - duration: " + str(duration) + "s" ) - -# dump new json file if config_filename is set -if args.config_filename != None and len(args.config_filename)>1: - if os.path.exists(args.config_filename): # delete json file (to enabiling re-write) - os.remove(args.config_filename) - with open(args.config_filename, "w") as json_data: - json_data.write(json.dumps(config)) - -sys.exit(0) diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_get_basic_config.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_get_basic_config.py deleted file mode 100755 index c6a95f560..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_get_basic_config.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/python3 - -import time, sys -import argparse -from fwo_const import importer_base_dir -sys.path.append(importer_base_dir) -from fwo_log import getFwoLogger -from cp_const import details_level -from cpcommon import get_basic_config - - -parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls') -parser.add_argument('-a', '--apihost', metavar='api_host', required=True, help='Check Point R8x management server') -parser.add_argument('-w', '--password', metavar='api_password_file', default='import_user_secret', help='name of the file to read the password for management server from') -parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch') -parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443') -parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Envireonment') -parser.add_argument('-l', '--layer', metavar='policy_layer_name(s)', required=True, 
help='name of policy layer(s) to read (comma separated)') -parser.add_argument('-k', '--package', metavar='policy package name', required=False, help='name of policy package (needed for nat rule retrieval)') -parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off') -parser.add_argument('-i', '--limit', metavar='api_limit', default='150', help='The maximal number of returned results per HTTPS Connection; default=150') -parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') -parser.add_argument('-t', '--testing', metavar='version_testing', default='off', help='Version test, [off|]; default=off') -parser.add_argument('-o', '--out', metavar='output_file', required=True, help='filename to write output in json format to') -parser.add_argument('-F', '--force', action='store_true', default=False, help='if set the import will be attempted without checking for changes before') - -args = parser.parse_args() -if len(sys.argv)==1: - parser.print_help(sys.stderr) - sys.exit(1) - -with open(args.password, "r") as password_file: - api_password = password_file.read().rstrip() - -debug_level = int(args.debug) -logger = getFwoLogger() -starttime = int(time.time()) -full_config_json = {} - -# possible todo: get mgmt_details via API just from mgmt_name and dev_name? -# todo: allow for multiple gateways -mgm_details = { - 'hostname': args.apihost, - 'port': args.port, - 'user': args.user, - 'secret': api_password, - 'configPath': args.domain, - 'devices': [ - { - 'local_rulebase_name': args.layer, - 'global_rulebase_name': None, - 'package_name': args.package - } - ] -} - -get_basic_config (full_config_json, mgm_details, config_filename=args.out, - force=args.force, limit=args.limit, details_level=details_level, test_version=args.testing, debug_level=debug_level, ssl_verification=set_ssl_verification(args.ssl, debug_level=debug_level)) - -duration = int(time.time()) - starttime -logger.debug ( "checkpointR8x/get_config - duration: " + str(duration) + "s" ) - -sys.exit(0) diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_config.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_config.py deleted file mode 100755 index 6b207f86f..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_config.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/python3 -import sys -from common import importer_base_dir -sys.path.append(importer_base_dir) -import parse_network, parse_service, parse_user # parse_rule, -import parse_network_csv, parse_rule_csv, parse_service_csv, parse_user_csv -import argparse -import json -import sys -import fwo_log - - -parser = argparse.ArgumentParser(description='parse json configuration file from Check Point R8x management') -parser.add_argument('-f', '--config_file', required=True, help='name of config file to parse (json format)') -parser.add_argument('-i', '--import_id', default='0', help='unique import id') -parser.add_argument('-m', '--management_name', default='', help='name of management system to import') -parser.add_argument('-r', '--rulebase', default='', help='name of rulebase to import') -parser.add_argument('-n', '--network_objects', action="store_true", help='import network objects') -parser.add_argument('-s', '--service_objects', action="store_true", help='import service objects') -parser.add_argument('-u', '--users', 
action="store_true", help='import users') -parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 1(DEBUG Console) 2(DEBUG File)i 2(DEBUG Console&File); default=0') -args = parser.parse_args() - -found_rulebase = False -number_of_section_headers_so_far = 0 -rule_num = 0 -nw_objects = [] -svc_objects = [] -section_header_uids=[] -result = "" - -# log config -debug_level = int(args.debug) -logger = fwo_log.getFwoLogger() - -args = parser.parse_args() -if len(sys.argv)==1: - parser.print_help(sys.stderr) - sys.exit(1) - -config_filename = args.config_file - -with open(args.config_file, "r") as json_data: - config = json.load(json_data) - -logger.debug ("parse_config - args"+ "\nf:" +args.config_file +"\ni: "+ args.import_id +"\nm: "+ args.management_name +"\nr: "+ args.rulebase +"\nn: "+ str(args.network_objects) +"\ns: "+ str(args.service_objects) +"\nu: "+ str(args.users) +"\nd: "+ str(args.debug)) - -if args.rulebase != '': - for rulebase in config['rulebases']: - current_layer_name = rulebase['layername'] - if current_layer_name == args.rulebase: - logger.debug("parse_config: found layer to parse: " + current_layer_name) - found_rulebase = True - rule_num, result = parse_rule_csv.csv_dump_rules(rulebase, args.rulebase, args.import_id, rule_num=0, section_header_uids=[], parent_uid="", debug_level=debug_level) - -if args.network_objects: - result = '' - nw_objects = [] - - if args.network_objects != '': - for obj_table in config['object_tables']: - parse_network.collect_nw_objects(obj_table, nw_objects, debug_level=debug_level) - for idx in range(0, len(nw_objects)-1): - if nw_objects[idx]['obj_typ'] == 'group': - parse_network.add_member_names_for_nw_group(idx, nw_objects) - - for nw_obj in nw_objects: - result += parse_network_csv.csv_dump_nw_obj(nw_obj, args.import_id) - -if args.service_objects: - result = '' - service_objects = [] - if args.service_objects != '': - for obj_table in config['object_tables']: - parse_service.collect_svc_objects(obj_table, service_objects) - # resolving group members: - for idx in range(0, len(service_objects)-1): - if service_objects[idx]['svc_typ'] == 'group': - parse_service.add_member_names_for_svc_group(idx, service_objects) - - for svc_obj in service_objects: - result += parse_service_csv.csv_dump_svc_obj(svc_obj, args.import_id) - -if args.users: - users = {} - result = '' - for rulebase in config['rulebases']: - parse_user.collect_users_from_rulebase(rulebase, users) - - for user_name in users.keys(): - user_dict = users[user_name] - result += parse_user_csv.csv_dump_user(user_name, user_dict, args.import_id) - -if args.rulebase != '' and not found_rulebase: - logger.exception("PARSE ERROR: rulebase '" + args.rulebase + "' not found.") -else: - result = result[:-1] # strip off final line break to avoid empty last line - print(result) diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_network_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_network_csv.py deleted file mode 100644 index c1e43faf8..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_network_csv.py +++ /dev/null @@ -1,41 +0,0 @@ -from fwo_base import csv_add_field -from fwo_const import csv_delimiter, line_delimiter - - -def csv_dump_nw_obj(nw_obj, import_id): - result_line = csv_add_field(import_id) # control_id - result_line += csv_add_field(nw_obj['obj_name']) # obj_name - result_line += csv_add_field(nw_obj['obj_typ']) # ob_typ - if nw_obj['obj_member_names'] != 
None: - result_line += csv_add_field(nw_obj['obj_member_names']) # obj_member_names - else: - result_line += csv_delimiter # no obj_member_names - if nw_obj['obj_member_refs'] != None: - result_line += csv_add_field(nw_obj['obj_member_refs']) # obj_member_refs - else: - result_line += csv_delimiter # no obj_member_refs - result_line += csv_delimiter # obj_sw - if nw_obj['obj_typ'] == 'group': - result_line += csv_delimiter # obj_ip for groups = null - result_line += csv_delimiter # obj_ip_end for groups = null - else: - result_line += csv_add_field(nw_obj['obj_ip']) # obj_ip - if 'obj_ip_end' in nw_obj: - result_line += csv_add_field(nw_obj['obj_ip_end'])# obj_ip_end - else: - result_line += csv_delimiter - result_line += csv_add_field(nw_obj['obj_color']) # obj_color - if nw_obj['obj_comment'] != None: - result_line += csv_add_field(nw_obj['obj_comment']) # obj_comment - else: - result_line += csv_delimiter # no obj_comment - result_line += csv_delimiter # obj_location - if 'obj_zone' in nw_obj: - result_line += csv_add_field(nw_obj['obj_zone']) # obj_zone - else: - result_line += csv_delimiter - result_line += csv_add_field(nw_obj['obj_uid']) # obj_uid - result_line += csv_delimiter # last_change_admin - # add last_change_time - result_line += line_delimiter - return result_line diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_rule_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_rule_csv.py deleted file mode 100644 index 12f633b41..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_rule_csv.py +++ /dev/null @@ -1,224 +0,0 @@ -from fwo_log import getFwoLogger -import json -import cp_const, cpcommon, parse_rule, fwo_const -from fwo_const import list_delimiter, csv_delimiter, line_delimiter -from fwo_base import csv_add_field -from fwo_exception import ImportRecursionLimitReached - - -def create_section_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): - # only do this once! 
: section_header_uids.append(rule_uid) - header_rule_csv = csv_add_field(import_id) # control_id - header_rule_csv += csv_add_field(str(rule_num)) # rule_num - header_rule_csv += csv_add_field(layer_name) # rulebase_name - header_rule_csv += csv_delimiter # rule_ruleid - header_rule_csv += csv_add_field('False') # rule_disabled - header_rule_csv += csv_add_field('False') # rule_src_neg - header_rule_csv += csv_add_field('Any') # rule_src - header_rule_csv += csv_add_field(cp_const.any_obj_uid) # rule_src_refs - header_rule_csv += csv_add_field('False') # rule_dst_neg - header_rule_csv += csv_add_field('Any') # rule_dst - header_rule_csv += csv_add_field(cp_const.any_obj_uid) # rule_dst_refs - header_rule_csv += csv_add_field('False') # rule_svc_neg - header_rule_csv += csv_add_field('Any') # rule_svc - header_rule_csv += csv_add_field(cp_const.any_obj_uid) # rule_svc_refs - header_rule_csv += csv_add_field('Accept') # rule_action - header_rule_csv += csv_add_field('Log') # rule_track - header_rule_csv += csv_add_field('Policy Targets') # rule_installon - header_rule_csv += csv_add_field('Any') # rule_time - header_rule_csv += csv_delimiter # rule_comment - header_rule_csv += csv_delimiter # rule_name - header_rule_csv += csv_add_field(rule_uid) # rule_uid - header_rule_csv += csv_add_field(section_name) # rule_head_text - header_rule_csv += csv_delimiter # rule_from_zone - header_rule_csv += csv_delimiter # rule_to_zone - header_rule_csv += csv_delimiter # rule_last_change_admin - if parent_uid != "": - header_rule_csv += csv_add_field(parent_uid, no_csv_delimiter=True) # parent_rule_uid - return header_rule_csv + line_delimiter - - -def create_domain_rule_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): - return create_section_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid) - - -def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid, debug_level=0): - logger = getFwoLogger() - rule_csv = '' - - # reference to domain rule layer, filling up basic fields - if 'type' in rule and rule['type'] != 'place-holder': -# add_missing_info_to_domain_ref_rule(rule) - if 'rule-number' in rule: # standard rule, no section header - # print ("rule #" + str(rule['rule-number']) + "\n") - rule_csv += csv_add_field(import_id) # control_id - rule_csv += csv_add_field(str(rule_num)) # rule_num - rule_csv += csv_add_field(layer_name) # rulebase_name - rule_csv += csv_add_field('') # rule_ruleid is empty - rule_csv += csv_add_field(str(not rule['enabled'])) # rule_disabled - rule_csv += csv_add_field(str(rule['source-negate'])) # src_neg - - # SOURCE names - rule_src_name = '' - for src in rule["source"]: - if src['type'] == 'LegacyUserAtLocation': - rule_src_name += src['name'] + list_delimiter - elif src['type'] == 'access-role': - if isinstance(src['networks'], str): # just a single source - if src['networks'] == 'any': - rule_src_name += src["name"] + '@' + 'Any' + list_delimiter - else: - rule_src_name += src["name"] + '@' + src['networks'] + list_delimiter - else: # more than one source - for nw in src['networks']: - rule_src_name += src[ - # TODO: this is not correct --> need to reverse resolve name from given UID - "name"] + '@' + nw + list_delimiter - else: # standard network objects as source - rule_src_name += src["name"] + list_delimiter - rule_src_name = rule_src_name[:-1] # removing last list_delimiter - rule_csv += csv_add_field(rule_src_name) # src_names - - # SOURCE refs - rule_src_ref = 
'' - for src in rule["source"]: - if src['type'] == 'LegacyUserAtLocation': - rule_src_ref += src["userGroup"] + '@' + src["location"] + list_delimiter - elif src['type'] == 'access-role': - if isinstance(src['networks'], str): # just a single source - if src['networks'] == 'any': - rule_src_ref += src['uid'] + '@' + cp_const.any_obj_uid + list_delimiter - else: - rule_src_ref += src['uid'] + '@' + src['networks'] + list_delimiter - else: # more than one source - for nw in src['networks']: - rule_src_ref += src['uid'] + '@' + nw + list_delimiter - else: # standard network objects as source - rule_src_ref += src["uid"] + list_delimiter - rule_src_ref = rule_src_ref[:-1] # removing last list_delimiter - rule_csv += csv_add_field(rule_src_ref) # src_refs - - rule_csv += csv_add_field(str(rule['destination-negate'])) # destination negation - - rule_dst_name = '' - for dst in rule["destination"]: - rule_dst_name += dst["name"] + list_delimiter - rule_dst_name = rule_dst_name[:-1] - rule_csv += csv_add_field(rule_dst_name) # rule dest_name - - rule_dst_ref = '' - for dst in rule["destination"]: - rule_dst_ref += dst["uid"] + list_delimiter - rule_dst_ref = rule_dst_ref[:-1] - rule_csv += csv_add_field(rule_dst_ref) # rule_dest_refs - - # SERVICE negate - rule_csv += csv_add_field(str(rule['service-negate'])) # service negation - # SERVICE names - rule_svc_name = '' - for svc in rule["service"]: - rule_svc_name += svc["name"] + list_delimiter - rule_svc_name = rule_svc_name[:-1] - rule_csv += csv_add_field(rule_svc_name) # rule svc name - - # SERVICE refs - rule_svc_ref = '' - for svc in rule["service"]: - rule_svc_ref += svc["uid"] + list_delimiter - rule_svc_ref = rule_svc_ref[:-1] - rule_csv += csv_add_field(rule_svc_ref) # rule svc ref - - rule_action = rule['action'] - rule_action_name = rule_action['name'] - rule_csv += csv_add_field(rule_action_name) # rule action - rule_track = rule['track'] - rule_track_type = rule_track['type'] - rule_csv += csv_add_field(rule_track_type['name']) # rule track - - rule_install_on = rule['install-on'] - first_rule_install_target = rule_install_on[0] - rule_csv += csv_add_field(first_rule_install_target['name']) # install on - - rule_time = rule['time'] - first_rule_time = rule_time[0] - rule_csv += csv_add_field(first_rule_time['name']) # time - if (rule['comments']!=None and rule['comments']!=''): - rule_csv += csv_add_field(rule['comments']) # comments - else: - rule_csv += csv_delimiter # no comments - if 'name' in rule: - rule_name = rule['name'] - else: - rule_name = None - rule_csv += csv_add_field(rule_name) # rule_name - - rule_csv += csv_add_field(rule['uid']) # rule_uid - rule_head_text = '' - rule_csv += csv_add_field(rule_head_text) # rule_head_text - rule_from_zone = '' - rule_csv += csv_add_field(rule_from_zone) - rule_to_zone = '' - rule_csv += csv_add_field(rule_to_zone) - rule_meta_info = rule['meta-info'] - rule_csv += csv_add_field(rule_meta_info['last-modifier']) - # new in v5.1.17: - if 'parent_rule_uid' in rule: - logger.debug('found rule (uid=' + rule['uid'] + ') with parent_rule_uid set: ' + rule['parent_rule_uid']) - parent_rule_uid = rule['parent_rule_uid'] - else: - parent_rule_uid = parent_uid - if (parent_rule_uid!=''): - rule_csv += csv_add_field(parent_rule_uid,no_csv_delimiter=True) - rule_csv += line_delimiter - return rule_csv - - -def csv_dump_rules(rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, debug_level=0, recursion_level=1): - logger = getFwoLogger() - result = '' - - if 
recursion_level>fwo_const.max_recursion_level: - raise ImportRecursionLimitReached("csv_dump_rules") from None - - if 'layerchunks' in rulebase: - for chunk in rulebase['layerchunks']: - if 'rulebase' in chunk: - for rules_chunk in chunk['rulebase']: - rule_num, rules_in_csv = csv_dump_rules(rules_chunk, layer_name, import_id, rule_num, section_header_uids, parent_uid, debug_level=debug_level, recursion_level=recursion_level+1) - result += rules_in_csv - else: - logger.warning("found no rulebase in chunk:\n" + json.dumps(chunk, indent=2)) - else: - if 'rulebase' in rulebase: - if rulebase['type'] == 'access-section' and not rulebase['uid'] in section_header_uids: # add section header, but only if it does not exist yet (can happen by chunking a section) - section_name = "section without name" - if 'name' in rulebase: - section_name = rulebase['name'] - if 'parent_rule_uid' in rulebase: - parent_uid = rulebase['parent_rule_uid'] - else: - parent_uid = "" - section_header = create_section_header(section_name, layer_name, import_id, rulebase['uid'], rule_num, section_header_uids, parent_uid) - rule_num += 1 - result += section_header - parent_uid = rulebase['uid'] - for rule in rulebase['rulebase']: - if rule['type'] == 'place-holder': # add domain rules - section_name = "" - if 'name' in rulebase: - section_name = rule['name'] - result += parse_rule.create_domain_rule_header(section_name, layer_name, import_id, rule['uid'], rule_num, section_header_uids, parent_uid) - else: # parse standard sections - rule_num, rules_in_layer = csv_dump_rules(rule, layer_name, import_id, rule_num, section_header_uids, parent_uid, debug_level=debug_level) - result += rules_in_layer - if rulebase['type'] == 'place-holder': # add domain rules - logger.debug('found domain rule ref: ' + rulebase['uid']) - section_name = "" - if 'name' in rulebase: - section_name = rulebase['name'] - result += parse_rule.create_domain_rule_header(section_name, layer_name, import_id, rulebase['uid'], rule_num, section_header_uids, parent_uid) - rule_num += 1 - if 'rule-number' in rulebase: - result += csv_dump_rule(rulebase, layer_name, import_id, rule_num, parent_uid, debug_level=debug_level) - rule_num += 1 - return rule_num, result diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_service_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_service_csv.py deleted file mode 100644 index 9e01b6b4e..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_service_csv.py +++ /dev/null @@ -1,55 +0,0 @@ -from fwo_base import csv_add_field -from fwo_const import csv_delimiter, line_delimiter - - -def csv_dump_svc_obj(svc_obj, import_id): - result_line = csv_add_field(import_id) # control_id - result_line += csv_add_field(svc_obj['svc_name']) # svc_name - result_line += csv_add_field(svc_obj['svc_typ']) # svc_typ - result_line += csv_delimiter # no svc_prod_specific - if svc_obj['svc_member_names'] != None: - result_line += csv_add_field(svc_obj['svc_member_names']) # svc_member_names - else: - result_line += csv_delimiter # no svc_member_names - if svc_obj['svc_member_refs'] != None: - result_line += csv_add_field(svc_obj['svc_member_refs']) # obj_member_refs - else: - result_line += csv_delimiter # no svc_member_refs - result_line += csv_add_field(svc_obj['svc_color']) # svc_color - result_line += csv_add_field(svc_obj['ip_proto']) # ip_proto - if svc_obj['svc_port']!=None: - result_line += str(svc_obj['svc_port']) + csv_delimiter # svc_port - else: - result_line += 
csv_delimiter # no svc_port - if svc_obj['svc_port_end']!=None: - result_line += str(svc_obj['svc_port_end']) + csv_delimiter # svc_port_end - else: - result_line += csv_delimiter # no svc_port_end - if 'svc_source_port' in svc_obj: - result_line += csv_add_field(svc_obj['svc_source_port']) # svc_source_port - else: - result_line += csv_delimiter # svc_source_port - if 'svc_source_port_end' in svc_obj: - result_line += csv_add_field(svc_obj['svc_source_port_end']) # svc_source_port_end - else: - result_line += csv_delimiter # svc_source_port_end - if 'svc_comment' in svc_obj and svc_obj['svc_comment'] != None: - result_line += csv_add_field(svc_obj['svc_comment']) # svc_comment - else: - result_line += csv_delimiter # no svc_comment - if 'rpc_nr' in svc_obj and svc_obj['rpc_nr'] != None: - result_line += csv_add_field(str(svc_obj['rpc_nr'])) # rpc_nr - else: - result_line += csv_delimiter # no rpc_nr - if 'svc_timeout_std' in svc_obj: - result_line += csv_add_field(svc_obj['svc_timeout_std']) # svc_timeout_std - else: - result_line += csv_delimiter # svc_timeout_std - if 'svc_timeout' in svc_obj and svc_obj['svc_timeout']!="" and svc_obj['svc_timeout']!=None: - result_line += csv_add_field(str(svc_obj['svc_timeout'])) # svc_timeout - else: - result_line += csv_delimiter # svc_timeout null - result_line += csv_add_field(svc_obj['svc_uid']) # svc_uid - result_line += csv_delimiter # last_change_admin - result_line += line_delimiter # last_change_time - return result_line diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_user_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_user_csv.py deleted file mode 100644 index 032540b52..000000000 --- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_user_csv.py +++ /dev/null @@ -1,28 +0,0 @@ -from fwo_base import csv_add_field -from fwo_const import csv_delimiter, line_delimiter - - -def csv_dump_user(user_name, user, import_id): - user_line = csv_add_field(import_id) # control_id - user_line += csv_add_field(user_name) # user_name - user_line += csv_add_field(user['user_typ']) # user_typ - if 'user_member_names' in user: - user_line += csv_add_field(user['user_member_names']) # user_member_names - else: - user_line += csv_delimiter # no user_member_names - if 'user_member_refs' in user: - user_line += csv_add_field(user['user_member_refs']) # user_member_refs - else: - user_line += csv_delimiter # no user_member_refs - if 'user_color' in user: - user_line += csv_add_field(user['user_color']) # user_color - else: - user_line += csv_delimiter # no user_color - if 'user_comment' in user and user['user_comment']!=None and user['user_comment']!='': - user_line += csv_add_field(user['user_comment']) # user_comment - else: - user_line += csv_delimiter # no user_comment - user_line += csv_add_field(user['user_uid']) # user_uid - user_line += csv_delimiter # user_valid_until - user_line += line_delimiter # last_change_admin - return user_line diff --git a/roles/importer/files/importer/common.py b/roles/importer/files/importer/common.py index 9d8c51120..ba9ae768d 100644 --- a/roles/importer/files/importer/common.py +++ b/roles/importer/files/importer/common.py @@ -15,7 +15,7 @@ import jsonpickle from fwo_exception import FwoApiLoginFailed, FwoApiFailedLockImport, ConfigFileNotFound, FwLoginFailed, ImportRecursionLimitReached from fwo_base import split_config - +from fwo_mail import send_change_notification_mail # import_management: import a single management (if no import for it is running) # lock 
mgmt for import via FWORCH API call, generating new import_id y @@ -120,6 +120,8 @@ def import_management(mgm_id=None, ssl_verification=None, debug_level_in=0, config_changed_since_last_import, error_string, error_count, change_count = get_config_from_api(mgm_details, full_config_json, config2import, jwt, current_import_id, start_time, in_file=in_file, import_tmp_path=import_tmp_path, error_string=error_string, error_count=error_count, change_count=change_count, limit=limit, force=force) + if (debug_level>7): # dump full native config read from fw API + logger.info(json.dumps(full_config_json, indent=2)) time_get_config = int(time.time()) - start_time logger.debug("import_management - getting config total duration " + str(time_get_config) + "s") @@ -148,6 +150,11 @@ def import_management(mgm_id=None, ssl_verification=None, debug_level_in=0, try: # get change count from db change_count = fwo_api.count_changes_per_import(fwo_config['fwo_api_base_url'], jwt, current_import_id) + if change_count>0: + emailConfig = fwo_api.get_config_values(fwo_config['fwo_api_base_url'], jwt, keyFilter="email") + impChangeNotifyConfig = fwo_api.get_config_values(fwo_config['fwo_api_base_url'], jwt, keyFilter="impChangeNotify") + notificationConfig = dict(emailConfig, **impChangeNotifyConfig) # merge the two config dicts + send_change_notification_mail(notificationConfig, change_count, mgm_details['name'], mgm_id) except: logger.error("import_management - unspecified error while getting change count: " + str(traceback.format_exc())) raise @@ -170,7 +177,7 @@ def import_management(mgm_id=None, ssl_verification=None, debug_level_in=0, else: # if no changes were found, we skip everything else without errors pass - if (debug_level>8): + if (debug_level>8): # dump normalized config for debugging purposes logger.info(json.dumps(config2import, indent=2)) error_count = complete_import(current_import_id, error_string, start_time, mgm_details, change_count, error_count, jwt) @@ -261,7 +268,8 @@ def complete_import(current_import_id, error_string, start_time, mgm_details, ch logger = getFwoLogger() fwo_config = readConfig(fwo_config_filename) - fwo_api.log_import_attempt(fwo_config['fwo_api_base_url'], jwt, mgm_details['id'], successful=not error_count) + success = (error_count==0) + log_result = fwo_api.log_import_attempt(fwo_config['fwo_api_base_url'], jwt, mgm_details['id'], successful=success) try: # CLEANUP: delete configs of imports (without changes) (if no error occured) if fwo_api.delete_json_config_in_import_table(fwo_config['fwo_api_base_url'], jwt, {"importId": current_import_id})<0: diff --git a/roles/importer/files/importer/fortiosmanagementREST/fOS_common.py b/roles/importer/files/importer/fortiosmanagementREST/fOS_common.py new file mode 100644 index 000000000..154be9d41 --- /dev/null +++ b/roles/importer/files/importer/fortiosmanagementREST/fOS_common.py @@ -0,0 +1,34 @@ +import sys +from common import importer_base_dir +sys.path.append(importer_base_dir + '/fortiosmanagementREST') +from curses import raw +from fwo_log import getFwoLogger +from fwo_const import list_delimiter, fwo_config_filename +from fwo_config import readConfig +from fwo_api import setAlert, create_data_issue + + +# TODO: deal with objects with identical names (e.g. 
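The notification hook above merges two config dictionaries with dict(a, **b), so keys from the impChangeNotify filter overwrite any colliding email keys. A small sketch of that merge; the key names come from the config used elsewhere in this change set, the values are placeholders:

    emailConfig = {"emailServerAddress": "mail.example.com", "emailPort": "25"}
    impChangeNotifyConfig = {"impChangeNotifyActive": "True",
                             "impChangeNotifyRecipients": "fw-admins@example.com"}

    # same merge as in import_management: copy emailConfig, overlay impChangeNotifyConfig
    notificationConfig = dict(emailConfig, **impChangeNotifyConfig)
    assert notificationConfig["emailPort"] == "25"
    assert notificationConfig["impChangeNotifyActive"] == "True"
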
all ipv4 & all ipv6) +def resolve_objects (obj_name_string_list, lookup_dict={}, delimiter=list_delimiter, jwt=None, import_id=None, mgm_id=None): + logger = getFwoLogger() + fwo_config = readConfig(fwo_config_filename) + + ref_list = [] + objects_not_found = [] + for el in obj_name_string_list.split(delimiter): + found = False + if el in lookup_dict: + ref_list.append(lookup_dict[el]) + else: + objects_not_found.append(el) + + for obj in objects_not_found: + if obj != 'all' and obj != 'Original': + if not create_data_issue(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, obj_name=obj, severity=1, mgm_id=mgm_id): + logger.warning("resolve_raw_objects: encountered error while trying to log an import data issue using create_data_issue") + + desc = "found a broken object reference '" + obj + "' " + setAlert(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, title="object reference error", mgm_id=mgm_id, severity=1, role='importer', \ + description=desc, source='import', alertCode=16) + + return delimiter.join(ref_list) diff --git a/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py b/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py index 2ade05e2e..de3a8f2a1 100644 --- a/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py +++ b/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py @@ -15,6 +15,7 @@ def normalize_nwobjects(full_config, config2import, import_id, nw_obj_types, jwt for obj_orig in full_config[obj_type]: obj_zone = 'global' obj = {} + ipa = "" obj.update({'obj_name': obj_orig['name']}) if 'subnet' in obj_orig: # ipv4 object if isinstance(obj_orig['subnet'], str) and ' ' in obj_orig['subnet']: @@ -57,9 +58,7 @@ def normalize_nwobjects(full_config, config2import, import_id, nw_obj_types, jwt if 'extip' not in obj_orig or len(obj_orig['extip'])==0: logger.error("vip (extip): found empty extip field for " + obj_orig['name']) else: - if len(obj_orig['extip'])>1: - logger.warning("vip (extip): found more than one extip, just using the first one for " + obj_orig['name']) - set_ip_in_obj(obj, obj_orig['extip'][0]) # resolving nat range if there is one + set_ip_in_obj(obj, obj_orig['extip']) # resolving nat range if there is one nat_obj = {} nat_obj.update({'obj_typ': 'host' }) nat_obj.update({'obj_color': 'black'}) @@ -73,14 +72,14 @@ def normalize_nwobjects(full_config, config2import, import_id, nw_obj_types, jwt else: if len(obj_orig['mappedip'])>1: logger.warning("vip (extip): found more than one mappedip, just using the first one for " + obj_orig['name']) - nat_ip = obj_orig['mappedip'][0] + nat_ip = obj_orig['mappedip'][0]['range'] set_ip_in_obj(nat_obj, nat_ip) obj.update({ 'obj_nat_ip': nat_obj['obj_ip'] }) # save nat ip in vip obj if 'obj_ip_end' in nat_obj: # this nat obj is a range - include the end ip in name and uid as well to avoid akey conflicts obj.update({ 'obj_nat_ip_end': nat_obj['obj_ip_end'] }) # save nat ip in vip obj nat_obj.update({'obj_name': nat_obj['obj_ip'] + '-' + nat_obj['obj_ip_end'] + nat_postfix}) else: - nat_obj.update({'obj_name': nat_obj['obj_ip'] + nat_postfix}) + nat_obj.update({'obj_name': str(nat_obj['obj_ip']) + nat_postfix}) nat_obj.update({'obj_uid': nat_obj['obj_name']}) ###### range handling diff --git a/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py b/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py index eeace4080..019f0d590 100644 --- a/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py +++ 
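resolve_objects maps a delimiter-joined list of object names to their references and reports anything it cannot resolve via create_data_issue/setAlert. A simplified sketch of the lookup half only; the '|' delimiter and sample names are illustrative (the real delimiter is fwo_const.list_delimiter):

    def resolve_objects_sketch(names, lookup, delimiter='|'):
        # lookup half of resolve_objects; the real function additionally logs
        # unresolved names (except 'all'/'Original') as data issues and raises
        # an alert via the FWO API
        refs, missing = [], []
        for name in names.split(delimiter):
            if name in lookup:
                refs.append(lookup[name])
            else:
                missing.append(name)
        return delimiter.join(refs), missing

    refs, missing = resolve_objects_sketch("host_a|net_b|typo",
                                           {"host_a": "uid-1", "net_b": "uid-2"})
    # refs == 'uid-1|uid-2', missing == ['typo']
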
b/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py @@ -9,7 +9,7 @@ from fwo_log import getFwoLogger from fwo_data_networking import get_matching_route_obj, get_ip_of_interface_obj import ipaddress -from fwcommon import resolve_objects +from fOS_common import resolve_objects import time diff --git a/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py b/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py index 5b16cf669..86415b0ac 100644 --- a/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py +++ b/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py @@ -69,30 +69,30 @@ def get_config(config2import, full_config, current_import_id, mgm_details, limit fOS_rule.getAccessPolicy(sid, fm_api_url, full_config, limit) # fOS_rule.getNatPolicy(sid, fm_api_url, full_config, limit) - # now we normalize relevant parts of the raw config and write the results to config2import dict - # currently reading zone from objects for backward compat with FortiManager 6.x - # fmgr_zone.normalize_zones(full_config, config2import, current_import_id) - - # write normalized networking data to config2import - # this is currently not written to the database but only used for natting decisions - # later we will probably store the networking info in the database as well as a basis - # for path analysis - - # normalize_network_data(full_config, config2import, mgm_details) - - fOS_user.normalize_users( - full_config, config2import, current_import_id, user_scope) - fOS_network.normalize_nwobjects( - full_config, config2import, current_import_id, nw_obj_scope, jwt=jwt, mgm_id=mgm_details['id']) - fOS_service.normalize_svcobjects( - full_config, config2import, current_import_id, svc_obj_scope) - fOS_user.normalize_users( - full_config, config2import, current_import_id, user_scope) - fOS_rule.normalize_access_rules( - full_config, config2import, current_import_id, mgm_details=mgm_details, jwt=jwt) - # fOS_rule.normalize_nat_rules( - # full_config, config2import, current_import_id, jwt=jwt) - # fOS_network.remove_nat_ip_entries(config2import) + # now we normalize relevant parts of the raw config and write the results to config2import dict + # currently reading zone from objects for backward compat with FortiManager 6.x + # fmgr_zone.normalize_zones(full_config, config2import, current_import_id) + + # write normalized networking data to config2import + # this is currently not written to the database but only used for natting decisions + # later we will probably store the networking info in the database as well as a basis + # for path analysis + + # normalize_network_data(full_config, config2import, mgm_details) + + fOS_user.normalize_users( + full_config, config2import, current_import_id, user_scope) + fOS_network.normalize_nwobjects( + full_config, config2import, current_import_id, nw_obj_scope, jwt=jwt, mgm_id=mgm_details['id']) + fOS_service.normalize_svcobjects( + full_config, config2import, current_import_id, svc_obj_scope) + fOS_zone.add_zone_if_missing (config2import, 'global', current_import_id) + + fOS_rule.normalize_access_rules( + full_config, config2import, current_import_id, mgm_details=mgm_details, jwt=jwt) + # fOS_rule.normalize_nat_rules( + # full_config, config2import, current_import_id, jwt=jwt) + # fOS_network.remove_nat_ip_entries(config2import) return 0 @@ -112,28 +112,3 @@ def getObjects(sid, fm_api_url, raw_config, limit, nw_obj_types, svc_obj_types): fOS_getter.update_config_with_fortiOS_api_call( raw_config, fm_api_url + "/cmdb/" + object_type + 
"?access_token=" + sid, "user_obj_" + object_type, limit=limit) - -# TODO: deal with objects with identical names (e.g. all ipv4 & all ipv6) -def resolve_objects (obj_name_string_list, lookup_dict={}, delimiter=list_delimiter, jwt=None, import_id=None, mgm_id=None): - logger = getFwoLogger() - fwo_config = readConfig(fwo_config_filename) - - ref_list = [] - objects_not_found = [] - for el in obj_name_string_list.split(delimiter): - found = False - if el in lookup_dict: - ref_list.append(lookup_dict[el]) - else: - objects_not_found.append(el) - - for obj in objects_not_found: - if obj != 'all' and obj != 'Original': - if not create_data_issue(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, obj_name=obj, severity=1, mgm_id=mgm_id): - logger.warning("resolve_raw_objects: encountered error while trying to log an import data issue using create_data_issue") - - desc = "found a broken object reference '" + obj + "' " - setAlert(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, title="object reference error", mgm_id=mgm_id, severity=1, role='importer', \ - description=desc, source='import', alertCode=16) - - return delimiter.join(ref_list) diff --git a/roles/importer/files/importer/fwo_api.py b/roles/importer/files/importer/fwo_api.py index 1477b550c..6d24dba86 100644 --- a/roles/importer/files/importer/fwo_api.py +++ b/roles/importer/files/importer/fwo_api.py @@ -41,58 +41,57 @@ def call(url, jwt, query, query_variables="", role="reporter", show_progress=Fal full_query = {"query": query, "variables": query_variables} logger = getFwoLogger() - session = requests.Session() - if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification) - session.verify = False - else: - session.verify = fwo_globals.verify_certs - session.headers = request_headers + with requests.Session() as session: + if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification) + session.verify = False + else: + session.verify = fwo_globals.verify_certs + session.headers = request_headers - try: - r = session.post(url, data=json.dumps(full_query), timeout=int(fwo_api_http_import_timeout)) - r.raise_for_status() - except requests.exceptions.RequestException: - logger.error(showApiCallInfo(url, full_query, request_headers, type='error') + ":\n" + str(traceback.format_exc())) + try: + r = session.post(url, data=json.dumps(full_query), timeout=int(fwo_api_http_import_timeout)) + r.raise_for_status() + except requests.exceptions.RequestException: + logger.error(showApiCallInfo(url, full_query, request_headers, type='error') + ":\n" + str(traceback.format_exc())) + if r != None: + if r.status_code == 503: + raise FwoApiTServiceUnavailable("FWO API HTTP error 503 (FWO API died?)" ) + if r.status_code == 502: + raise FwoApiTimeout("FWO API HTTP error 502 (might have reached timeout of " + str(int(fwo_api_http_import_timeout)/60) + " minutes)" ) + else: + raise + if int(fwo_globals.debug_level) > 4: + logger.debug (showApiCallInfo(url, full_query, request_headers, type='debug')) + if show_progress: + print('.', end='', flush=True) if r != None: - if r.status_code == 503: - raise FwoApiTServiceUnavailable("FWO API HTTP error 503 (FWO API died?)" ) - if r.status_code == 502: - raise FwoApiTimeout("FWO API HTTP error 502 (might have reached timeout of " + str(int(fwo_api_http_import_timeout)/60) + " minutes)" ) + return r.json() else: - raise - if int(fwo_globals.debug_level) > 4: - logger.debug (showApiCallInfo(url, full_query, 
request_headers, type='debug')) - if show_progress: - print('.', end='', flush=True) - - if r != None: - return r.json() - else: - return None + return None def login(user, password, user_management_api_base_url, method='api/AuthenticationToken/Get'): payload = {"Username": user, "Password": password} - session = requests.Session() - if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification) - session.verify = False - else: - session.verify = fwo_globals.verify_certs - session.headers = {'Content-Type': 'application/json'} + with requests.Session() as session: + if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification) + session.verify = False + else: + session.verify = fwo_globals.verify_certs + session.headers = {'Content-Type': 'application/json'} - try: - response = session.post(user_management_api_base_url + method, data=json.dumps(payload)) - except requests.exceptions.RequestException: - raise FwoApiLoginFailed ("fwo_api: error during login to url: " + str(user_management_api_base_url) + " with user " + user) from None + try: + response = session.post(user_management_api_base_url + method, data=json.dumps(payload)) + except requests.exceptions.RequestException: + raise FwoApiLoginFailed ("fwo_api: error during login to url: " + str(user_management_api_base_url) + " with user " + user) from None - if response.text is not None and response.status_code==200: - return response.text - else: - error_txt = "fwo_api: ERROR: did not receive a JWT during login" + \ - ", api_url: " + str(user_management_api_base_url) + \ - ", ssl_verification: " + str(fwo_globals.verify_certs) - raise FwoApiLoginFailed(error_txt) + if response.text is not None and response.status_code==200: + return response.text + else: + error_txt = "fwo_api: ERROR: did not receive a JWT during login" + \ + ", api_url: " + str(user_management_api_base_url) + \ + ", ssl_verification: " + str(fwo_globals.verify_certs) + raise FwoApiLoginFailed(error_txt) def set_api_url(base_url, testmode, api_supported, hostname): @@ -135,6 +134,18 @@ def get_config_value(fwo_api_base_url, jwt, key='limit'): return None +def get_config_values(fwo_api_base_url, jwt, keyFilter='limit'): + query_variables = {'keyFilter': keyFilter+"%"} + config_query = "query getConf($keyFilter: String) { config(where: {config_key: {_ilike: $keyFilter}}) { config_key config_value } }" + result = call(fwo_api_base_url, jwt, config_query, query_variables=query_variables, role='importer') + if 'data' in result and 'config' in result['data']: + resultArray = result['data']['config'] + dict1 = {v['config_key']: v['config_value'] for k,v in enumerate(resultArray)} + return dict1 + else: + return None + + def get_mgm_details(fwo_api_base_url, jwt, query_variables, debug_level=0): mgm_query = """ query getManagementDetails($mgmId: Int!) { diff --git a/roles/importer/files/importer/fwo_const.py b/roles/importer/files/importer/fwo_const.py index 6794a81ca..1a3fd5686 100644 --- a/roles/importer/files/importer/fwo_const.py +++ b/roles/importer/files/importer/fwo_const.py @@ -22,6 +22,7 @@ import_tmp_path = base_dir + '/tmp/import' fwo_config_filename = base_dir + '/etc/fworch.json' max_recursion_level = 25 # do not call a function recursively more than this +default_section_header_text = 'section without name' # how many objects (network, services, rules, ...) should be sent to the FWO API in one go? # should be between 500 and 2.000 in production (results in a max obj number of max. 
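get_config_values flattens the rows returned by the GraphQL config query into a plain key/value dict. The sketch below mimics the shape of result['data']['config'] and shows an equivalent comprehension without the unused enumerate index; the sample values are made up:

    resultArray = [
        {'config_key': 'emailServerAddress', 'config_value': 'mail.example.com'},
        {'config_key': 'emailPort', 'config_value': '25'},
    ]
    config = {row['config_key']: row['config_value'] for row in resultArray}
    # {'emailServerAddress': 'mail.example.com', 'emailPort': '25'}
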
5 x this value - nwobj/svc/rules/...) diff --git a/roles/importer/files/importer/fwo_exception.py b/roles/importer/files/importer/fwo_exception.py index 6906525f3..c2c1e69da 100644 --- a/roles/importer/files/importer/fwo_exception.py +++ b/roles/importer/files/importer/fwo_exception.py @@ -6,6 +6,13 @@ def __init__(self, message="Login to FW management failed"): self.message = message super().__init__(self.message) +class FwLogoutFailed(Exception): + """Raised when logout from FW management failed""" + + def __init__(self, message="Logout from FW management failed"): + self.message = message + super().__init__(self.message) + class FwoApiLoginFailed(Exception): """Raised when login to FWO API failed""" diff --git a/roles/importer/files/importer/fwo_mail.py b/roles/importer/files/importer/fwo_mail.py new file mode 100644 index 000000000..de8a60ae7 --- /dev/null +++ b/roles/importer/files/importer/fwo_mail.py @@ -0,0 +1,82 @@ +import json +import jsonpickle +from fwo_data_networking import InterfaceSerializable, RouteSerializable +import fwo_globals +from fwo_const import max_objs_per_chunk, csv_delimiter, apostrophe, line_delimiter +from fwo_log import getFwoLogger, getFwoAlertLogger +from copy import deepcopy +import smtplib, ssl +from email.message import EmailMessage + + +def send_mail(recipient_list, subject, body, fwo_config): + logger = getFwoLogger() + # Create a text/plain message + msg = EmailMessage() + senderAddress = "" + msg.set_content(body) + msg['Subject'] = subject + if 'emailSenderAddress' in fwo_config: + senderAddress = fwo_config['emailSenderAddress'] + msg['From'] = senderAddress + msg['To'] = recipient_list + tlsSetting = "" + + try: + if 'emailTls' not in fwo_config or fwo_config['emailTls']=='StartTls': + smtp_server = smtplib.SMTP(fwo_config['emailServerAddress'], int(fwo_config['emailPort'])) + if 'emailTls' in fwo_config and fwo_config['emailTls']=='StartTls': + tlsSetting = fwo_config['emailTls'] + smtp_server.starttls() #setting up to TLS connection + smtp_server.ehlo() #calling the ehlo() again as encryption happens on calling startttls() + else: + smtp_server.ehlo() #setting the ESMTP protocol + elif fwo_config['emailTls']=='Tls': + context = ssl.create_default_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + smtp_server = smtplib.SMTP(fwo_config['emailServerAddress'], int(fwo_config['emailPort'])) + smtp_server.starttls(context=context) + smtp_server.ehlo() + if 'emailUser' in fwo_config and 'emailPassword' in fwo_config and fwo_config['emailUser']!="": + smtp_server.login(fwo_config['emailUser'], fwo_config['emailPassword']) #logging into out email id + + #sending the mail by specifying the from and to address and the message + smtp_server.send_message(msg) + smtp_server.quit() #terminating the server + except Exception as e: + if 'emailPort' not in fwo_config: + logger.warning("Missing email server port config. Double-check your emailPort configuration") + elif int(fwo_config['emailPort'])<1 or int(fwo_config['emailPort'])>65535: + logger.warning("Email server port configuration out of bounds: " + str(fwo_config['emailPort']) + ". Double-check your emailPort configuration") + elif 'emailServer' not in fwo_config: + logger.warning("Missing email server address. Double-check your emailServer configuration") + elif len(fwo_config['emailServer'])==0: + logger.warning("Empty email server address. Double-check your emailServer configuration") + elif recipient_list is None: + logger.warning("Undefined email recipient list. 
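send_mail above distinguishes a StartTls setup from a Tls setup that merely skips certificate verification; both paths end up on smtplib.SMTP plus starttls(). A minimal STARTTLS sketch; host, port and addresses are placeholders:

    import smtplib
    from email.message import EmailMessage

    msg = EmailMessage()
    msg.set_content("2 changes found")
    msg['Subject'] = "firewall orchestrator change notification"
    msg['From'] = "fwo@example.com"
    msg['To'] = "admin@example.com"

    server = smtplib.SMTP("mail.example.com", 25)
    server.starttls()      # upgrade the plain connection to TLS
    server.ehlo()          # re-identify after the TLS handshake
    server.send_message(msg)
    server.quit()
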
Double-check your email recipient list") + elif len(recipient_list)==0: + logger.warning("Empty email recipient list. Double-check your email recipient list") + else: + logger.warning("error while sending import change notification email: " + + "emailServer: " + fwo_config['emailServerAddress'] + ", " + + "emailSenderAddress: " + senderAddress + ", " + + "emailPort: " + fwo_config['emailPort'] + ", " + + "emailTls: " + str(tlsSetting) + ", " + + "impChangeNotifyRecipients: " + str(recipient_list) + ", " + + "error: " + str(e) + ) + + +def send_change_notification_mail(fwo_config, number_of_changes, mgm_name, mgm_id): + if 'impChangeNotifyActive' in fwo_config and bool(fwo_config['impChangeNotifyActive']) and 'impChangeNotifyRecipients' in fwo_config: + body = "" + if 'impChangeNotifyBody' in fwo_config: + body += fwo_config['impChangeNotifyBody'] + ": " + body += str(number_of_changes) + ", Management: " + mgm_name + " (id=" + mgm_id + ")" + send_mail( + fwo_config['impChangeNotifyRecipients'].split(','), + fwo_config['impChangeNotifySubject'] if 'impChangeNotifySubject' in fwo_config else "firewall orchestrator change notification", + body, + fwo_config + ) diff --git a/roles/importer/files/importer/import-mgm.py b/roles/importer/files/importer/import-mgm.py index ddbf1169d..b1897c2d8 100755 --- a/roles/importer/files/importer/import-mgm.py +++ b/roles/importer/files/importer/import-mgm.py @@ -18,8 +18,16 @@ parser.add_argument('-f', '--force', action='store_true', default=False, help='If set the import will be attempted without checking for changes or if the importer module is the one defined') parser.add_argument('-d', '--debug', metavar='debug_level', default='0', - help='Debug Level: 0=off, 1=send debug to console, 2=send debug to file, 3=save noramlized config file; 4=additionally save native config file; default=0. 
\n' +\ - 'config files are saved to $FWORCH/tmp/import dir') + help='Debug Level: \ + 0=off, \ + 1=send debug to console, \ + 2=send debug to file, \ + 3=save noramlized config file, \ + 4=additionally save native config file, \ + 8=send native config (as read from firewall) to standard out, \ + 9=send normalized config to standard out, \ + (default=0), \ + config files are saved to $FWORCH/tmp/import dir') parser.add_argument('-v', "--verify_certificates", action='store_true', default = None, help = "verify certificates") parser.add_argument('-s', "--suppress_certificate_warnings", action='store_true', default = None, diff --git a/roles/importer/tasks/main.yml b/roles/importer/tasks/main.yml index 7a8c16123..ac439a1c6 100644 --- a/roles/importer/tasks/main.yml +++ b/roles/importer/tasks/main.yml @@ -50,6 +50,7 @@ owner: "{{ fworch_user }}" group: "{{ fworch_group }}" mode: "0755" + tags: [ 'test' ] - name: set x-flag for importer executables (top level only) file: diff --git a/roles/importer/templates/fworch-importer-api.service.j2 b/roles/importer/templates/fworch-importer-api.service.j2 index ac5971a16..1287133fe 100644 --- a/roles/importer/templates/fworch-importer-api.service.j2 +++ b/roles/importer/templates/fworch-importer-api.service.j2 @@ -17,8 +17,8 @@ ExecStartPre=/bin/sleep 10 ExecStart={{ importer_home }}/import-main-loop.py # ExecStop={{ importer_home }}/import-api-stop-helper TimeoutStopSec=300min -StandardOutput=syslog -StandardError=syslog +StandardOutput=journal +StandardError=journal SyslogIdentifier={{ product_name }}-importer-api User={{ fworch_user }} KillSignal=SIGINT diff --git a/roles/importer/templates/fworch-importer-legacy.service.j2 b/roles/importer/templates/fworch-importer-legacy.service.j2 index 4ea747068..ba32e021e 100644 --- a/roles/importer/templates/fworch-importer-legacy.service.j2 +++ b/roles/importer/templates/fworch-importer-legacy.service.j2 @@ -7,8 +7,8 @@ WorkingDirectory={{ importer_home }} ExecStartPre=/bin/sleep 10 ExecStart={{ importer_home }}/fworch-importer-main.pl ExecStop={{ importer_home }}/import-stop-helper -StandardOutput=syslog -StandardError=syslog +StandardOutput=journal +StandardError=journal SyslogIdentifier={{ product_name }}-importer-legacy User={{ fworch_user }} Environment="PERL5LIB={{ importer_home }}" diff --git a/roles/lib/files/FWO.Api.Client/APIConnection.cs b/roles/lib/files/FWO.Api.Client/APIConnection.cs index c1b8798b7..76c2ae95c 100644 --- a/roles/lib/files/FWO.Api.Client/APIConnection.cs +++ b/roles/lib/files/FWO.Api.Client/APIConnection.cs @@ -6,10 +6,14 @@ namespace FWO.Api.Client { - public abstract class ApiConnection + public abstract class ApiConnection : IDisposable { + private bool disposed = false; + public event EventHandler? OnAuthHeaderChanged; + protected List subscriptions = new List(); + protected void InvokeOnAuthHeaderChanged(object? sender, string newAuthHeader) { OnAuthHeaderChanged?.Invoke(sender, newAuthHeader); @@ -19,8 +23,33 @@ protected void InvokeOnAuthHeaderChanged(object? sender, string newAuthHeader) public abstract void SetRole(string role); + public abstract void SetProperRole(System.Security.Claims.ClaimsPrincipal user, List targetRoleList); + + public abstract void SwitchBack(); + public abstract Task SendQueryAsync(string query, object? variables = null, string? operationName = null); - public abstract ApiSubscription GetSubscription(Action exceptionHandler, ApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? 
operationName = null); + public abstract GraphQlApiSubscription GetSubscription(Action exceptionHandler, GraphQlApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null); + + protected virtual void AddSubscription(ApiSubscription subscription) + { + subscriptions.Add(subscription); + } + + protected abstract void Dispose(bool disposing); + + ~ ApiConnection() + { + if (disposed) return; + Dispose(false); + } + + public void Dispose() + { + if (disposed) return; + Dispose(true); + disposed = true; + GC.SuppressFinalize(this); + } } } diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/_repo.graphql deleted file mode 100644 index 80cdfe785..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/_repo.graphql +++ /dev/null @@ -1,553 +0,0 @@ -################ basics - -query getImportId($management_id: Int!, $time: timestamp!) { - import_control_aggregate( - where: { mgm_id: { _eq: $management_id }, stop_time: { _lte: $time } } - ) { - aggregate { - max { - control_id - } - } - } -} - -################# dyn_filter - -query filter_dyn($manufacturer_id: [Int!]) { - __typename - stm_dev_typ(where: { dev_typ_id: { _in: $manufacturer_id } }) { - dev_typ_name - dev_typ_version - dev_typ_id - } -} - -query filter_dyn($management_id: [Int!], $device_id: [Int!]) { - __typename - management(where: { mgm_id: { _in: $management_id } }) { - mgm_id - mgm_name - devices(where: { dev_id: { _in: $device_id } }) { - dev_id - dev_name - } - } -} - -query filter_dyn($manufacturer_id: [Int!]!, $management_id: [Int!]!) { - __typename - stm_dev_typ(where: { dev_typ_id: { _in: $manufacturer_id } }) { - dev_typ_name - dev_typ_version - dev_typ_id - management(where: { mgm_id: { _in: $management_id } }) { - mgm_id - mgm_name - } - } -} - -# query returning a flat list of all device_types matching triple filter: -query filter_dyn_device_type( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] -) { - stm_dev_typ( - where: { - _and: { - dev_typ_id: { _in: $manufacturer_id } - devices: { dev_id: { _in: $device_id } } - management: { mgm_id: { _in: $management_id } } - } - } - ) { - dev_typ_id - dev_typ_name - } -} - -# query returning a flat list of all managements matching triple filter: -query filter_dyn_management( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] -) { - management( - where: { - _and: { - mgm_id: { _in: $management_id } - dev_typ_id: { _in: $manufacturer_id } - devices: { dev_id: { _in: $device_id } } - } - } - ) { - mgm_id - mgm_name - } -} - -# query returning a flat list of all devices matching triple filter: -query filter_dyn_device( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] -) { - device( - where: { - _and: { - mgm_id: { _in: $management_id } - dev_typ_id: { _in: $manufacturer_id } - dev_id: { _in: $device_id } - } - } - ) { - dev_id - dev_name - } -} - -####################### - -query filter_dyn_device_type_count( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] -) { - stm_dev_typ_aggregate( - where: { - _and: { - dev_typ_id: { _in: $manufacturer_id } - devices: { dev_id: { _in: $device_id } } - management: { mgm_id: { _in: $management_id } } - } - } - ) { - aggregate { - count - } - } -} - -query filter_dyn_management_count( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] 
-) { - management_aggregate( - where: { - _and: { - mgm_id: { _in: $management_id } - dev_typ_id: { _in: $manufacturer_id } - devices: { dev_id: { _in: $device_id } } - } - } - ) { - aggregate { - count - } - } -} - -# query returning the aggregate number of all devices matching triple filter: -query filter_dyn_device_count( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] -) { - device_aggregate( - where: { - _and: { - mgm_id: { _in: $management_id } - dev_typ_id: { _in: $manufacturer_id } - dev_id: { _in: $device_id } - } - } - ) { - aggregate { - count - } - } -} - -####################### - -# query returning devices matching a query and total count: -query filterDeviceByType( - $manufacturer_id: [Int!] - $management_id: [Int!] - $device_id: [Int!] -) { - stm_dev_typ_aggregate( - where: { - _and: { - dev_typ_id: { _in: $manufacturer_id } - devices: { dev_id: { _in: $device_id } } - management: { mgm_id: { _in: $management_id } } - } - } - ) { - aggregate { - count - } - } - device( - where: { - _and: { - mgm_id: { _in: $management_id } - dev_typ_id: { _in: $manufacturer_id } - dev_id: { _in: $device_id } - } - } - ) { - dev_id - dev_name - } -} - -####################### - -# query returning a multi-level structure with all data matching triple filter: -query filterDevices( - $manufacturerId: [Int!] - $managementId: [Int!] - $deviceId: [Int!] -) { - __typename - stm_dev_typ(where: { dev_typ_id: { _in: $manufacturerId } }) { - dev_typ_name - dev_typ_version - dev_typ_id - management(where: { mgm_id: { _in: $managementId } }) { - mgm_id - mgm_name - devices(where: { dev_id: { _in: $deviceId } }) { - dev_id - dev_name - } - } - } -} - -query ruleFilterFullTextCurrent( - $managementId: [Int!] - $deviceId: [Int!] - $fullText: String! - $limit: Int - $offset: Int -) { - management( - where: { mgm_id: { _in: $managementId } } - order_by: { mgm_name: asc } - ) { - mgm_id - mgm_name - devices( - where: { dev_id: { _in: $deviceId } } - order_by: { dev_name: asc } - ) { - dev_id - dev_name - } - rules( - limit: $limit - offset: $offset - where: { - _and: { - active: { _eq: true } - _or: [ - { rule_src: { _ilike: $fullText } } - { rule_dst: { _ilike: $fullText } } - { rule_svc: { _ilike: $fullText } } - ] - } - } - order_by: { rule_num_numeric: asc } - ) { - rule_uid - rule_src - rule_dst - rule_svc - } - } -} - -query ruleFilterFullTextInTime ( - $managementId: [Int!] - $deviceId: [Int!] - $ruleSrcName: [String!] - $ruleSrcIp: [cidr!] 
- $limit: Int - $offset: Int - $current: Boolean - $reportTime: timestamp -) { - management( - where: { mgm_id: { _in: $managementId } } - order_by: { mgm_name: asc } - ) { - mgm_id - mgm_name - devices( - where: { dev_id: { _in: $deviceId } } - order_by: { dev_name: asc } - ) { - dev_id - dev_name - rules_aggregate( - limit: $limit - offset: $offset - where: { - import_control: { stop_time: {_lte: $reportTime } } - importControlByRuleLastSeen: { stop_time: {_gt: $reportTime } } - active: { _eq: $current } - rule_src: { _in: $ruleSrcName } - rule_disabled: { _eq: false } - rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } } - } - order_by: { rule_num_numeric: asc } - ) { - aggregate { - count - } - } - rules( - limit: $limit - offset: $offset - where: { - import_control: { stop_time: {_lte: $reportTime } } - importControlByRuleLastSeen: { stop_time: {_gt: $reportTime } } - active: { _eq: $current } - rule_src: { _in: $ruleSrcName } - rule_disabled: { _eq: false } - rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } } - } - order_by: { rule_num_numeric: asc } - ) { - rule_uid - rule_src - lastSeenImport: importControlByRuleLastSeen { - stop_time - control_id - } - createImport: import_control { - stop_time - control_id - } - } - } - } -} - -query ruleFilterKVCurrent( - $managementId: [Int!] - $deviceId: [Int!] - $reportTime: timestamp - $ruleSrcName: [String!] - $ruleSrcIp: [cidr!] - $ruleDstName: [String!] - $ruleDstIp: [cidr!] - $limit: Int - $offset: Int -) { - management( - where: { mgm_id: { _in: $managementId } } - order_by: { mgm_name: asc } - ) { - mgm_id - mgm_name - devices( - where: { dev_id: { _in: $deviceId } } - order_by: { dev_name: asc } - ) { - dev_id - dev_name - rules_aggregate( - limit: $limit - offset: $offset - where: { - active: { _eq: true } - rule_src: { _in: $ruleSrcName } - rule_disabled: { _eq: false } - rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } } - } - order_by: { rule_num_numeric: asc } - ) { - aggregate { - count - } - } - rules( - limit: $limit - offset: $offset - where: { - active: { _eq: true } - rule_src: { _in: $ruleSrcName } - rule_disabled: { _eq: false } - rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } } - } - order_by: { rule_num_numeric: asc } - ) { - rule_uid - rule_src - lastSeenImport: importControlByRuleLastSeen { - stop_time - control_id - } - createImport: import_control { - stop_time - control_id - } - } - } - } -} - - -query ruleFilterKVInTime( - $managementId: [Int!] - $deviceId: [Int!] - $reportTime: timestamp - $ruleSrcName: [String!] - $ruleSrcIp: [cidr!] - $ruleDstName: [String!] - $ruleDstIp: [cidr!] - $limit: Int - $offset: Int -) { - management( - where: { mgm_id: { _in: $managementId } } - order_by: { mgm_name: asc } - ) { - mgm_id - mgm_name - devices( - where: { dev_id: { _in: $deviceId } } - order_by: { dev_name: asc } - ) { - dev_id - dev_name - } - rules( - limit: $limit - offset: $offset - where: { - import_control: { stop_time: { _lte: $reportTime } } - importControlByRuleLastSeen: { stop_time: { _gt: $reportTime } } - rule_disabled: { _eq: false } - rule_src: { _in: $ruleSrcName } - rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } } - rule_dst: { _in: $ruleDstName } - rule_tos: { object: { obj_ip: { _in: $ruleDstIp } } } - } - order_by: { rule_num_numeric: asc } - ) { - rule_uid - rule_src - lastSeenImport: importControlByRuleLastSeen { - stop_time - control_id - } - createImport: import_control { - stop_time - control_id - } - } - } -} - - -query ruleFilterKVInTimeCount( - $managementId: [Int!] 
- $deviceId: [Int!] - $reportTime: timestamp - $ruleSrcName: [String!] - $ruleSrcIp: [cidr!] - $ruleDstName: [String!] - $ruleDstIp: [cidr!] -) { - management( - where: { mgm_id: { _in: $managementId } } - order_by: { mgm_name: asc } - ) { - mgm_id - mgm_name - devices( - where: { dev_id: { _in: $deviceId } } - order_by: { dev_name: asc } - ) { - dev_id - dev_name - rules_aggregate( - where: { - import_control: { stop_time: { _lte: $reportTime } } - importControlByRuleLastSeen: { stop_time: { _gt: $reportTime } } - rule_disabled: { _eq: false } - rule_src: { _in: $ruleSrcName } - rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } } - rule_dst: { _in: $ruleDstName } - rule_tos: { object: { obj_ip: { _in: $ruleDstIp } } } - } - ) { - aggregate { - count - } - } - } - } -} - -query ruleFilterKVInTimeSingleValues( - $managementId: [Int!] - $deviceId: [Int!] - $reportTime: timestamp - $ruleSrcName1: String - $ruleSrcName2: String - $limit: Int - $offset: Int -) { - management( - where: { mgm_id: { _in: $managementId } } - order_by: { mgm_name: asc } - ) { - mgm_id - mgm_name - devices( - where: { dev_id: { _in: $deviceId } } - order_by: { dev_name: asc } - ) { - dev_id - dev_name - } - rules( - limit: $limit - offset: $offset - where: { - _and: { - import_control: { stop_time: { _lte: $reportTime } } - importControlByRuleLastSeen: { stop_time: { _gt: $reportTime } } - rule_disabled: { _eq: false } - _or: [ - { rule_src: { _ilike: $ruleSrcName1 } } - { rule_src: { _ilike: $ruleSrcName2 } } - ] - } - } - order_by: { rule_num_numeric: asc } - ) { - rule_uid - rule_src - lastSeenImport: importControlByRuleLastSeen { - stop_time - control_id - } - createImport: import_control { - stop_time - control_id - } - } - } -} - -# replace rule values with ...ruleOverview diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/auth/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/auth/_repo.graphql deleted file mode 100644 index b514c7ea7..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/auth/_repo.graphql +++ /dev/null @@ -1,23 +0,0 @@ - -query getVisibleDevIdsPerTenant($tenant_id: Int!) { - device(where: { tenant_to_devices: { tenant_id: { _eq: $tenant_id } } }) { - dev_id - } -} - -# this does not work: -# query getVisibleDevIdsFromTenantName($tenant_name: String!) { -# device( -# where: {client_to_devices: -# { -# tenant_id: {_eq: getTenantId($tenant_name)}} -# } -# ) -# { dev_id } -# } - -query tenantCanViewAllDevices($tenant_id: Int!) { - tenant(where: { tenant_id: { _eq: $tenant_id } }) { - tenant_can_view_all_devices - } -} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/addNetworkZone.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/addNetworkZone.graphql new file mode 100644 index 000000000..8b8193548 --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/addNetworkZone.graphql @@ -0,0 +1,25 @@ +mutation insert_compliance_network_zone ($name: String!, $description: String!, $ip_ranges: [compliance_ip_range_insert_input!]!, $super_network_zone_id: bigint, +$communication_sources: [compliance_network_zone_communication_insert_input!]!, $communication_destinations: [compliance_network_zone_communication_insert_input!]!, +$sub_network_zones: [compliance_network_zone_insert_input!]!) 
{ + insert_compliance_network_zone_one ( + object: { + super_network_zone_id: $super_network_zone_id, + name: $name, + description: $description, + ip_ranges: { + data: $ip_ranges + }, + network_zone_communication_destinations: { + data: $communication_destinations + }, + network_zone_communication_sources: { + data: $communication_sources + }, + sub_network_zones: { + data: $sub_network_zones + } + } + ) { + id + } +} \ No newline at end of file diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/deleteNetworkZone.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/deleteNetworkZone.graphql new file mode 100644 index 000000000..7800da5be --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/deleteNetworkZone.graphql @@ -0,0 +1,7 @@ +mutation delete_compliance_network_zone ($id: bigint!) { + delete_compliance_network_zone_by_pk ( + id: $id + ) { + id + } +} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/getNetworkZones.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/getNetworkZones.graphql new file mode 100644 index 000000000..cca37df14 --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/getNetworkZones.graphql @@ -0,0 +1,31 @@ +query get_compliance_network_zones { + compliance_network_zone (order_by: {name: asc}) { + id + name + description + ip_ranges { + ip_range_start + ip_range_end + } + super_network_zone { + id + name + } + sub_network_zones { + id + name + } + network_zone_communication_destinations { + to_network_zone { + id + name + } + } + network_zone_communication_sources { + from_network_zone { + id + name + } + } + } +} \ No newline at end of file diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZone.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZone.graphql new file mode 100644 index 000000000..3b25ce7fb --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZone.graphql @@ -0,0 +1,68 @@ +mutation update_compliance_network_zone ($network_zone_id: bigint!, $name: String!, $description: String!, $super_network_zone_id: bigint, + $add_ip_ranges: [compliance_ip_range_insert_input!]!, $delete_ip_ranges_exp: [compliance_ip_range_bool_exp!]!, + $add_zone_communication: [compliance_network_zone_communication_insert_input!]!, $delete_zone_communication_exp: [compliance_network_zone_communication_bool_exp!]!, + $add_sub_zones_exp: [compliance_network_zone_bool_exp!]!, $delete_sub_zones_exp: [compliance_network_zone_bool_exp!]!) 
+{ + update_compliance_network_zone ( + where: {id: {_eq: $network_zone_id}} + _set: { + name: $name, + description: $description, + super_network_zone_id: $super_network_zone_id + } + ) { + affected_rows + } + + delete_compliance_ip_range ( + where: { + network_zone_id: {_eq: $network_zone_id}, + _or: $delete_ip_ranges_exp + } + ) { + affected_rows + } + + insert_compliance_ip_range ( + objects: $add_ip_ranges + ) { + affected_rows + } + + delete_compliance_network_zone_communication ( + where: { + _or: $delete_zone_communication_exp + } + ) { + affected_rows + } + + insert_compliance_network_zone_communication ( + objects: $add_zone_communication + ) { + affected_rows + } + + update_compliance_network_zone_many ( + updates: [ + { + where: { + _or: $delete_sub_zones_exp + } + _set: { + super_network_zone_id: null + } + }, + { + where: { + _or: $add_sub_zones_exp + } + _set: { + super_network_zone_id: $network_zone_id + } + } + ] + ) { + affected_rows + } +} \ No newline at end of file diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZoneCommunication.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZoneCommunication.graphql new file mode 100644 index 000000000..54aed3e5f --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZoneCommunication.graphql @@ -0,0 +1,18 @@ +mutation update_compliance_network_zone_communication( + $delete_zone_communication_exp: [compliance_network_zone_communication_bool_exp!]!, + $add_zone_communication: [compliance_network_zone_communication_insert_input!]!,) +{ + delete_compliance_network_zone_communication ( + where: { + _or: $delete_zone_communication_exp + } + ) { + affected_rows + } + + insert_compliance_network_zone_communication ( + objects: $add_zone_communication + ) { + affected_rows + } +} \ No newline at end of file diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/config/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/config/_repo.graphql deleted file mode 100644 index 72f78f671..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/config/_repo.graphql +++ /dev/null @@ -1,24 +0,0 @@ - -# JWT Hash algorithm (needed by API, Middleware, UI) - -# default language per user (UI) -# current strategy: all user specific information is stored in ldap -# --> should be retrieved via middleware server? - -############################################### -# basic config data related to device import/report -# the following could be exposed for offering a UI menu for adding new basic config data: -# currently only read by (UI, Importer) - -# stm_ -# action -# change_type -# color -# dev_typ -# ip_proto -# nattyp (needed?) 
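The compliance network zone mutations added above are consumed by the C# API client; purely to illustrate the query/variables shape, the simplest of them (deleteNetworkZone) could be posted like this from Python. Endpoint, jwt handling and the zone id are placeholders:

    import requests

    delete_zone = """
    mutation delete_compliance_network_zone ($id: bigint!) {
      delete_compliance_network_zone_by_pk (id: $id) { id }
    }
    """
    payload = {"query": delete_zone, "variables": {"id": 7}}   # 7 = placeholder zone id
    # r = requests.post(fwo_api_base_url, json=payload,
    #                   headers={"Authorization": "Bearer " + jwt})
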
-# obj_typ -# report_typ -# svc_typ -# track -# usr_typ diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/device/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/device/_repo.graphql deleted file mode 100644 index 1ea0bddc0..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/device/_repo.graphql +++ /dev/null @@ -1,51 +0,0 @@ -query showManufacturers { - stm_dev_typ { - dev_typ_id - dev_typ_manufacturer - dev_typ_version - } -} - -##################################### - -query showManagements { - management { - mgm_id - mgm_name - } -} - -##################################### - -query showDevices { - device { - dev_id - dev_name - local_rulebase_name - management { - mgm_id - mgm_name - } - } -} - -query showDevicesWithType { - device { - dev_id - dev_name - stm_dev_typ { - dev_typ_name - dev_typ_version - } - } -} - -################################### - -query showManufacturers { - stm_dev_typ(order_by: { dev_typ_manufacturer: asc, dev_typ_version: asc }) { - dev_typ_id - dev_typ_manufacturer - dev_typ_version - } -} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getAllUiLogEntrys.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getAllUiLogEntrys.graphql new file mode 100644 index 000000000..cb60dbc27 --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getAllUiLogEntrys.graphql @@ -0,0 +1,10 @@ +query getAllUiLogEntrys{ + log_data_issue (where: {source: {_eq: "ui"}} order_by: { data_issue_id: desc }){ + data_issue_id + severity + issue_timestamp + suspected_cause + description + user_id + } +} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/device/getImportStatus.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getImportStatus.graphql similarity index 100% rename from roles/lib/files/FWO.Api.Client/APIcalls/device/getImportStatus.graphql rename to roles/lib/files/FWO.Api.Client/APIcalls/monitor/getImportStatus.graphql diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql index bd17688da..b18a016c6 100644 --- a/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql +++ b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql @@ -5,5 +5,6 @@ query getUiLogEntrys ($user: Int!){ issue_timestamp suspected_cause description + user_id } } diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/networkObject/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/networkObject/_repo.graphql deleted file mode 100644 index a8ae2d117..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/networkObject/_repo.graphql +++ /dev/null @@ -1,25 +0,0 @@ - - -# needs to be exact import id for the specific device, otherwise it might not return desired results -query listHistoricalObjects($import_id: Int!, $management_id: Int) { - object_aggregate( - where: { - mgm_id: { _eq: $mgmt } - obj_create: { _lte: $import_id } - obj_last_seen: { _gte: $import_id } - } - ) { - aggregate { - count - } - } - object( - where: { - mgm_id: { _eq: $mgmt } - obj_create: { _lte: $import_id } - obj_last_seen: { _gte: $import_id } - } - ) { - ...networkObjectDetails - } -} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql index cdd8292e5..e8f07924b 100644 --- a/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql +++ 
b/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql @@ -35,6 +35,15 @@ fragment ruleOpenCertOverview on rule { name } } + recert_history: recertifications (where: { owner: $ownerWhere, recert_date: {_is_null: false}}, order_by: { recert_date: desc }) { + recert_date + recertified + user_dn + comment + owner { + name + } + } } rule_src_neg rule_dst_neg diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/report/getUsageDataCount.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/report/getUsageDataCount.graphql new file mode 100644 index 000000000..a76006fea --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/report/getUsageDataCount.graphql @@ -0,0 +1,8 @@ + +query getUsageDataCount($devId: Int) { + rule_aggregate(where: {_and: [ {dev_id: {_eq: $devId } }, { rule_metadatum: {rule_last_hit: { _is_null: false } } } ] }) { + aggregate { + count + } + } +} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeGeneratedReportsChanges.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeGeneratedReportsChanges.graphql new file mode 100644 index 000000000..14057b001 --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeGeneratedReportsChanges.graphql @@ -0,0 +1,16 @@ +subscription subscribeGeneratedReportsChanges { + report(order_by:{report_id:desc}) { + report_id + report_name + report_start_time + report_end_time + report_type + description + uiuser { + uiuser_username + } + report_template { + report_template_name + } + } +} \ No newline at end of file diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql index 71931da7a..d16fdd659 100644 --- a/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql +++ b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql @@ -1,5 +1,5 @@ subscription subscribeReportScheduleChanges { - report_schedule { + report_schedule(order_by: {report_schedule_id: desc}) { report_schedule_id report_schedule_name report_schedule_every @@ -8,7 +8,6 @@ report_schedule_owner_user: uiuser { uiuser_id uiuser_username - uuid ldap_connection: ldap_connection { ldap_connection_id } @@ -20,8 +19,9 @@ report_filter report_parameters } - report_schedule_formats{ + report_schedule_formats { report_schedule_format_name } + report_schedule_counter } -} \ No newline at end of file +} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/_repo.graphql deleted file mode 100644 index 0990e97df..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/_repo.graphql +++ /dev/null @@ -1,120 +0,0 @@ - -query getSpecificRuleById($ruleId: Int!) 
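getUsageDataCount above counts the rules of a device whose rule_metadatum carries a non-null rule_last_hit, i.e. rules that have been hit at least once. The aggregate comes back nested like this (values are made up):

    result = {"data": {"rule_aggregate": {"aggregate": {"count": 1234}}}}
    rules_with_last_hit = result["data"]["rule_aggregate"]["aggregate"]["count"]   # 1234
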
{ - rule(where: { rule_id: { _eq: $ruleId } }) { - ...ruleDetailsForReport - } -} - -query listRuleChangesOverview( - $startId: Int - $stopId: Int - $devId: Int - $changeType: bpchar -) { - changelog_rule( - where: { - _and: [ - { control_id: { _lt: $stopId } } - { control_id: { _gt: $startId } } - { security_relevant: { _eq: true } } - ] - dev_id: { _eq: $devId } - change_action: { _eq: $changeType } - } - ) { - change_request_info - change_time - changelog_rule_comment - new_rule_id - old_rule_id - unique_name - dev_id - change_action - new_rule: rule { - ...ruleOverview - } - old_rule: ruleByOldRuleId { - ...ruleOverview - } - } -} - -query listRuleChangesDetails( - $startId: Int - $stopId: Int - $devId: Int - $changeType: bpchar -) { - changelog_rule( - where: { - _and: [ - { control_id: { _lt: $stopId } } - { control_id: { _gt: $startId } } - { security_relevant: { _eq: true } } - ] - dev_id: { _eq: $devId } - change_action: { _eq: $changeType } - } - ) { - dev_id - change_action - import_run_details: import_control { - import_id: control_id - mgm_id - is_initial_import - import_time: stop_time - } - rule { - ...ruleDetailsForReport - } - ruleByOldRuleId { - ...ruleDetailsForReport - } - } -} - - - -############################## -## mutations -############################## - - -mutation updateRuleRuleComment($rule_id: Int!, $new_comment: String!) { - update_rule(where: {rule_id: {_eq: $rule_id}}, _set: {rule_comment: $new_comment}) { - affected_rows - returning { - rule_id - rule_comment_post: rule_comment - } - } -} - -query filterRulesByTenant($importId: bigint) { - view_tenant_rules(where: {access_rule: {_eq: true}, rule_last_seen: {_gte: $importId}, rule_create: {_lte: $importId}}) { - rule_id - rule_src - rule_dst - rule_create - rule_last_seen - tenant_id - } -} - -query filterRulesByTenantWithoutAnyRulesWithCount($importId: bigint) { - view_tenant_rules_aggregate - (where: {access_rule: {_eq: true}, rule_last_seen: {_gte: $importId}, rule_create: {_lte: $importId}, _and: [{rule_src: {_neq: "all"}}, {rule_dst: {_neq: "all"}}, {rule_src: {_neq: "Any"}}, {rule_dst: {_neq: "Any"}}]}) - { - aggregate { - count - } - } - view_tenant_rules(where: {access_rule: {_eq: true}, rule_last_seen: {_gte: $importId}, rule_create: {_lte: $importId}, _and: [{rule_src: {_neq: "all"}}, {rule_dst: {_neq: "all"}}, {rule_src: {_neq: "Any"}}, {rule_dst: {_neq: "Any"}}]}) { - rule_id - rule_src - rule_dst - rule_create - rule_last_seen - tenant_id - } -} diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql index d090fe986..33b764bf5 100644 --- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql +++ b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql @@ -1,6 +1,7 @@ fragment ruleDetails on rule { rule_id rule_uid + dev_id rule_action section_header: rule_head_text rule_comment diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql index c4116d8a7..8ffa21369 100644 --- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql +++ b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql @@ -1,6 +1,7 @@ fragment ruleDetails on rule { rule_id rule_uid + dev_id rule_action section_header: rule_head_text rule_comment diff --git 
a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql index 89f810f42..5042df8cf 100644 --- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql +++ b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql @@ -1,6 +1,7 @@ fragment ruleOverview on rule { rule_id rule_uid + dev_id rule_action section_header: rule_head_text rule_comment diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/user/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/user/_repo.graphql deleted file mode 100644 index 0cebcc73d..000000000 --- a/roles/lib/files/FWO.Api.Client/APIcalls/user/_repo.graphql +++ /dev/null @@ -1,35 +0,0 @@ -fragment userDetails on usr { - user_id - user_uid - user_name - user_comment - user_lastname - user_firstname - usr_typ_id - stm_usr_typ { - usr_typ_name - } - user_member_names - user_member_refs -} - -query listUsers( - $management_id: [Int!] - $time: String - $user_name: [String!] - $limit: Int - $offset: Int -) { - management(where: { mgm_id: { _in: $management_id } }) { - mgm_id - mgm_name - usrs( - limit: $limit - offset: $offset - where: { active: { _eq: true }, user_name: { _in: $user_name } } - order_by: { user_name: asc } - ) { - ...userDetails - } - } -} diff --git a/roles/lib/files/FWO.Api.Client/ApiSubscription.cs b/roles/lib/files/FWO.Api.Client/ApiSubscription.cs index d3b6eda6f..b8f964771 100644 --- a/roles/lib/files/FWO.Api.Client/ApiSubscription.cs +++ b/roles/lib/files/FWO.Api.Client/ApiSubscription.cs @@ -1,108 +1,29 @@ -using GraphQL; -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Text; -using System.Text.Json; using System.Threading.Tasks; -using FWO.Api.Client; -using Newtonsoft.Json.Linq; -using FWO.Logging; -using GraphQL.Client.Abstractions; -using GraphQL.Client.Http; namespace FWO.Api.Client { - public class ApiSubscription : IDisposable + public abstract class ApiSubscription : IDisposable { - public delegate void SubscriptionUpdate(SubscriptionResponseType reponse); - public event SubscriptionUpdate OnUpdate; + private bool disposed = false; - private IObservable> subscriptionStream; - private IDisposable subscription; - private readonly GraphQLHttpClient graphQlClient; - private readonly GraphQLRequest request; - private readonly Action internalExceptionHandler; + protected abstract void Dispose(bool disposing); - public ApiSubscription(ApiConnection apiConnection, GraphQLHttpClient graphQlClient, GraphQLRequest request, Action exceptionHandler, SubscriptionUpdate OnUpdate) - { - this.OnUpdate = OnUpdate; - this.graphQlClient = graphQlClient; - this.request = request; - - // handle subscription terminating exceptions - internalExceptionHandler = (Exception exception) => - { - // Case: Jwt expired - if (exception.Message.Contains("JWTExpired")) - { - // Quit subscription by throwing exception. 
- // This does NOT lead to a real thrown exception within the application but is instead handled by the graphql library - throw exception; - } - exceptionHandler(exception); - }; - - CreateSubscription(); - - apiConnection.OnAuthHeaderChanged += ApiConnectionOnAuthHeaderChanged; - } - - private void CreateSubscription() - { - Log.WriteDebug("API", $"Creating API subscription {request.OperationName}."); - subscriptionStream = graphQlClient.CreateSubscriptionStream(request, internalExceptionHandler); - Log.WriteDebug("API", "API subscription created."); - - subscription = subscriptionStream.Subscribe(response => - { - if (ApiConstants.UseSystemTextJsonSerializer) - { - JsonElement.ObjectEnumerator responseObjectEnumerator = response.Data.EnumerateObject(); - responseObjectEnumerator.MoveNext(); - SubscriptionResponseType returnValue = JsonSerializer.Deserialize(responseObjectEnumerator.Current.Value.GetRawText()) ?? - throw new Exception($"Could not convert result from Json to {nameof(SubscriptionResponseType)}.\nJson: {responseObjectEnumerator.Current.Value.GetRawText()}"); ; - OnUpdate(returnValue); - } - else - { - try - { - // If repsonse.Data == null -> Jwt expired - connection was closed - // Leads to this method getting called again - if (response.Data == null) - { - // Terminate subscription - subscription.Dispose(); - } - else - { - JObject data = (JObject)response.Data; - JProperty prop = (JProperty)(data.First ?? throw new Exception($"Could not retrieve unique result attribute from Json.\nJson: {response.Data}")); - JToken result = prop.Value; - SubscriptionResponseType returnValue = result.ToObject() ?? throw new Exception($"Could not convert result from Json to {typeof(SubscriptionResponseType)}.\nJson: {response.Data}"); - OnUpdate(returnValue); - } - } - catch (Exception ex) - { - Log.WriteError("GraphQL Subscription", "Subscription lead to exception", ex); - throw; - } - } - }); - } - - private void ApiConnectionOnAuthHeaderChanged(object? sender, string jwt) + public void Dispose() { - subscription.Dispose(); - CreateSubscription(); + if (disposed) return; + Dispose(true); + disposed = true; + GC.SuppressFinalize(this); } - public void Dispose() + ~ ApiSubscription() { - subscription.Dispose(); - GC.SuppressFinalize(this); + if (disposed) return; + Dispose(false); } } } diff --git a/roles/lib/files/FWO.Api.Client/Data/ComplianceNetworkZone.cs b/roles/lib/files/FWO.Api.Client/Data/ComplianceNetworkZone.cs new file mode 100644 index 000000000..fe825434d --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/Data/ComplianceNetworkZone.cs @@ -0,0 +1,181 @@ +using FWO.Api.Client; +using NetTools; +using Newtonsoft.Json; +using System.Net; +using System.Text.Json.Serialization; + +namespace FWO.Api.Data +{ + public class ComplianceNetworkZone + { + [JsonProperty("id"), JsonPropertyName("id")] + public int Id { get; set; } = -1; + + [JsonProperty("name"), JsonPropertyName("name")] + public string Name { get; set; } = ""; + + [JsonProperty("description"), JsonPropertyName("description")] + public string Description { get; set; } = ""; + + [JsonProperty("ip_ranges", ItemConverterType = typeof(IpAddressRangeJsonTypeConverter)), JsonPropertyName("ip_ranges")] + public IPAddressRange[] IPRanges { get; set; } = new IPAddressRange[0]; + + [JsonProperty("super_network_zone"), JsonPropertyName("super_network_zone")] + public ComplianceNetworkZone? 
Superzone { get; set; } = null; + + [JsonProperty("sub_network_zones"), JsonPropertyName("sub_network_zones")] + public ComplianceNetworkZone[] Subzones { get; set; } = new ComplianceNetworkZone[0]; + + [JsonProperty("network_zone_communication_sources", ItemConverterType = typeof(WrapperConverter), + ItemConverterParameters = new object[] { "from_network_zone" }), JsonPropertyName("network_zone_communication_sources")] + public ComplianceNetworkZone[] AllowedCommunicationSources { get; set; } = new ComplianceNetworkZone[0]; + + [JsonProperty("network_zone_communication_destinations", ItemConverterType = typeof(WrapperConverter), + ItemConverterParameters = new object[] { "to_network_zone" }), JsonPropertyName("network_zone_communication_destinations")] + public ComplianceNetworkZone[] AllowedCommunicationDestinations { get; set; } = new ComplianceNetworkZone[0]; + + + public bool CommunicationAllowedFrom(ComplianceNetworkZone from) + { + return AllowedCommunicationSources.Contains(from); + } + + public bool CommunicationAllowedTo(ComplianceNetworkZone to) + { + return AllowedCommunicationDestinations.Contains(to); + } + + public bool OverlapExists(List ipRanges, List> unseenIpRanges) + { + bool result = false; + + for (int i = 0; i < IPRanges.Length; i++) + { + for (int j = 0; j < ipRanges.Count; j++) + { + if (OverlapExists(IPRanges[i], ipRanges[j])) + { + result = true; + RemoveOverlap(unseenIpRanges[j], IPRanges[i]); + } + } + } + return result; + } + + /// + /// Checks if IP range a and b overlap. + /// + /// First IP range + /// Second IP range + /// True, if IP ranges overlap, false otherwise. + private bool OverlapExists(IPAddressRange a, IPAddressRange b) + { + return IpToUint(a.Begin) <= IpToUint(b.End) && IpToUint(b.Begin) <= IpToUint(a.End); + } + + private void RemoveOverlap(List ranges, IPAddressRange toRemove) + { + for (int i = 0; i < ranges.Count; i++) + { + if (OverlapExists(ranges[i], toRemove)) + { + if (IpToUint(toRemove.Begin) <= IpToUint(ranges[i].Begin) && IpToUint(toRemove.End) >= IpToUint(ranges[i].End)) + { + // Complete overlap, remove the entire range + ranges.RemoveAt(i); + i--; + } + else if (IpToUint(toRemove.Begin) <= IpToUint(ranges[i].Begin)) + { + // Overlap on the left side, update the start + ranges[i].Begin = UintToIp(IpToUint(toRemove.End) + 1); + } + else if (IpToUint(toRemove.End) >= IpToUint(ranges[i].End)) + { + // Overlap on the right side, update the end + ranges[i].End = UintToIp(IpToUint(toRemove.Begin) - 1); + } + else + { + // Overlap in the middle, split the range + // begin..remove.begin-1 + IPAddress end = ranges[i].End; + ranges[i].End = UintToIp(IpToUint(toRemove.Begin) - 1); + // remove.end+1..end + ranges.Insert(i, new IPAddressRange(UintToIp(IpToUint(toRemove.End) + 1), end)); + i++; + } + } + } + } + + private uint IpToUint(IPAddress ipAddress) + { + byte[] bytes = ipAddress.GetAddressBytes(); + + // flip big-endian(network order) to little-endian + if (BitConverter.IsLittleEndian) + { + Array.Reverse(bytes); + } + + return BitConverter.ToUInt32(bytes, 0); + } + + private IPAddress UintToIp(uint ipAddress) + { + byte[] bytes = BitConverter.GetBytes(ipAddress); + + // flip big-endian(network order) to little-endian + if (BitConverter.IsLittleEndian) + { + Array.Reverse(bytes); + } + + return new IPAddress(bytes); + } + + public object Clone() + { + IPAddressRange[] ipRangesClone = new IPAddressRange[IPRanges.Length]; + for (int i = 0; i < IPRanges.Length; i++) + { + ipRangesClone[i] = new IPAddressRange(IPRanges[i].Begin, 
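The range arithmetic above reduces IPv4 addresses to unsigned 32-bit integers (reversing network byte order on little-endian hosts) and then applies the usual interval test: two ranges overlap iff begin(a) <= end(b) and begin(b) <= end(a). A small self-contained check of that logic, IPv4 only and mirroring IpToUint; this is a sketch, not part of the change set.

```csharp
using System;
using System.Net;

public static class OverlapDemo
{
    // Same conversion idea as ComplianceNetworkZone.IpToUint: bytes arrive in network order
    // (big-endian), so they are reversed on little-endian machines before building the uint.
    static uint ToUint(string ip)
    {
        byte[] bytes = IPAddress.Parse(ip).GetAddressBytes();
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(bytes);
        }
        return BitConverter.ToUInt32(bytes, 0);
    }

    public static void Main()
    {
        // 10.0.0.0 - 10.0.0.255 vs. 10.0.0.128 - 10.0.1.10: they share 10.0.0.128 - 10.0.0.255.
        bool overlap = ToUint("10.0.0.0") <= ToUint("10.0.1.10")     // begin(a) <= end(b)
                    && ToUint("10.0.0.128") <= ToUint("10.0.0.255"); // begin(b) <= end(a)
        Console.WriteLine(overlap); // True
    }
}
```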
IPRanges[i].End); + } + + return new ComplianceNetworkZone() + { + Id = Id, + Superzone = (ComplianceNetworkZone?)Superzone?.Clone(), + Name = Name, + Description = Description, + IPRanges = ipRangesClone, + Subzones = CloneArray(Subzones), + AllowedCommunicationSources = CloneArray(AllowedCommunicationSources), + AllowedCommunicationDestinations = CloneArray(AllowedCommunicationDestinations) + }; + } + + private static ComplianceNetworkZone[] CloneArray(ComplianceNetworkZone[] array) + { + ComplianceNetworkZone[] arrayClone = new ComplianceNetworkZone[array.Length]; + for (int i = 0; i < array.Length; i++) + { + arrayClone[i] = (ComplianceNetworkZone)array[i].Clone(); + } + return arrayClone; + } + + public override bool Equals(object? obj) + { + if (obj == null) return false; + return ((ComplianceNetworkZone)obj).Id == Id; + } + + public override int GetHashCode() + { + return HashCode.Combine(Id); + } + } +} diff --git a/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs b/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs index de33eee3a..3bfbd4b4c 100644 --- a/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs +++ b/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs @@ -41,6 +41,11 @@ public class DeviceFilter public DeviceFilter() {} + public DeviceFilter(DeviceFilter devFilter) + { + Managements = devFilter.Managements; + } + public DeviceFilter(List devIds) { ManagementSelect dummyManagement = new ManagementSelect(); diff --git a/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs b/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs index 3f29d0d6c..ad221bab7 100644 --- a/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs +++ b/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs @@ -5,8 +5,6 @@ using Newtonsoft.Json; namespace FWO.Api.Data { - [Newtonsoft.Json.JsonConverter(typeof(NoTypeConverterJsonConverter))] - [TypeConverter(typeof(JsonStringConverter))] public class DeviceType { [JsonProperty("id"), JsonPropertyName("id")] @@ -22,15 +20,12 @@ public class DeviceType public string Manufacturer { get; set; } = ""; [JsonProperty("isPureRoutingDevice"), JsonPropertyName("isPureRoutingDevice")] - public Boolean IsPureRoutingDevice { get; set; } + public bool IsPureRoutingDevice { get; set; } [JsonProperty("isManagement"), JsonPropertyName("isManagement")] - public Boolean IsManagement { get; set; } + public bool IsManagement { get; set; } - // [JsonProperty("predefinedObjects"), JsonPropertyName("predefinedObjects")] - // public ??? 
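Equals() and GetHashCode() of ComplianceNetworkZone above compare zones by Id only, which is what lets the Contains() calls in CommunicationAllowedFrom/To match freshly deserialized objects. A short illustration; the zone values are made up and the snippet is not part of the change set.

```csharp
using System;
using System.Linq;
using FWO.Api.Data;

public static class ZoneEqualityDemo
{
    public static void Main()
    {
        ComplianceNetworkZone a = new ComplianceNetworkZone { Id = 7, Name = "dmz" };
        ComplianceNetworkZone b = new ComplianceNetworkZone { Id = 7, Name = "dmz (renamed)" };

        Console.WriteLine(a.Equals(b));                         // True - only Id is compared
        Console.WriteLine(new[] { a }.Contains(b));             // True - LINQ Contains uses Equals
        Console.WriteLine(a.GetHashCode() == b.GetHashCode());  // True - hash also derives from Id
    }
}
```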
PredefinedObjects { get; set; } - - public static List LegacyDevTypeList = new List + private static List LegacyDevTypeList = new List { 2, // Netscreen 5.x-6.x 4, // FortiGateStandalone 5ff @@ -40,13 +35,13 @@ public class DeviceType 8 // JUNOS 10-21 }; - public static Dictionary SupermanagerMap = new Dictionary + private static Dictionary SupermanagerMap = new Dictionary { // Mgmt -> Supermgmt { 11, 12 }, // FortiADOM 5ff -> FortiManager 5ff { 9, 13 } // Check Point R8x -> Check Point MDS R8x }; - public static Dictionary SupermanagerGatewayMap = new Dictionary + private static Dictionary SupermanagerGatewayMap = new Dictionary { // Supermgmt -> Gateway { 12, 10}, // FortiManager 5ff-> FortiGate 5ff @@ -55,16 +50,17 @@ public class DeviceType { 14, 16} // Cisco Firepower }; - public static List CheckPointManagers = new List + private static List CheckPointManagers = new List { 13, 9 // Check Point MDS R8x and Check Point R8x }; - public static List FortiManagers = new List + private static List FortiManagers = new List { 12 // FortiManager 5ff }; + public DeviceType() {} diff --git a/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs b/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs index d1dbdf2b9..aeb0c16b1 100644 --- a/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs +++ b/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs @@ -1,4 +1,6 @@ -using System.Text.Json.Serialization; +using System.Net; +using System.Text.Json.Serialization; +using NetTools; using Newtonsoft.Json; namespace FWO.Api.Data @@ -10,5 +12,6 @@ public class NetworkZone [JsonProperty("zone_name"), JsonPropertyName("zone_name")] public string Name { get; set; } = ""; + } } diff --git a/roles/lib/files/FWO.Api.Client/Data/Recertification.cs b/roles/lib/files/FWO.Api.Client/Data/Recertification.cs index 133cc5531..d10fa1576 100644 --- a/roles/lib/files/FWO.Api.Client/Data/Recertification.cs +++ b/roles/lib/files/FWO.Api.Client/Data/Recertification.cs @@ -7,6 +7,8 @@ public class Recertification : RecertificationBase { [JsonProperty("owner"), JsonPropertyName("owner")] public FwoOwner? FwoOwner { get; set; } = new FwoOwner(); - } + [JsonProperty("user_dn"), JsonPropertyName("user_dn")] + public string UserDn { get; set; } = ""; + } } diff --git a/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs b/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs index 70bd4446b..a3d2476e1 100644 --- a/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs +++ b/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs @@ -32,13 +32,14 @@ public class ReportTemplate public ReportTemplate() {} - public ReportTemplate(string filter, DeviceFilter deviceFilter, int? reportType, TimeFilter timeFilter, RecertFilter recertFilter) + public ReportTemplate(string filter, DeviceFilter deviceFilter, int? reportType, TimeFilter timeFilter, RecertFilter recertFilter, UnusedFilter? unusedFilter) { Filter = filter; ReportParams.DeviceFilter = deviceFilter; ReportParams.ReportType = reportType; ReportParams.TimeFilter = timeFilter; ReportParams.RecertFilter = recertFilter; + ReportParams.UnusedFilter = unusedFilter ?? 
new UnusedFilter(); Detailed = false; } @@ -64,5 +65,9 @@ public class ReportParams [JsonProperty("recert_filter"), JsonPropertyName("recert_filter")] public RecertFilter RecertFilter { get; set; } = new RecertFilter(); + + [JsonProperty("unused_filter"), JsonPropertyName("unused_filter")] + public UnusedFilter UnusedFilter { get; set; } = new UnusedFilter(); + } } diff --git a/roles/lib/files/FWO.Api.Client/Data/Rule.cs b/roles/lib/files/FWO.Api.Client/Data/Rule.cs index 773e67690..9d69e7383 100644 --- a/roles/lib/files/FWO.Api.Client/Data/Rule.cs +++ b/roles/lib/files/FWO.Api.Client/Data/Rule.cs @@ -83,10 +83,12 @@ public class Rule [JsonProperty("matches"), JsonPropertyName("matches")] public string IpMatch {get; set;} = ""; + [JsonProperty("dev_id"), JsonPropertyName("dev_id")] + public int DeviceId { get; set; } + public int DisplayOrderNumber { get; set; } public bool Certified { get; set; } - public int DeviceId { get; set; } public string DeviceName { get; set; } = ""; } diff --git a/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs b/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs index c906a0e37..a3b7a32f7 100644 --- a/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs +++ b/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs @@ -38,6 +38,9 @@ public class RuleMetadata [JsonProperty("recertification"), JsonPropertyName("recertification")] public List RuleRecertification { get; set; } = new List(); + [JsonProperty("recert_history"), JsonPropertyName("recert_history")] + public List RecertHistory { get; set; } = new List(); + public DateTime NextRecert { get; set; } public string LastCertifierName { get; set; } = ""; diff --git a/roles/lib/files/FWO.Api.Client/Data/UiUser.cs b/roles/lib/files/FWO.Api.Client/Data/UiUser.cs index e3889fb57..872b4d33f 100644 --- a/roles/lib/files/FWO.Api.Client/Data/UiUser.cs +++ b/roles/lib/files/FWO.Api.Client/Data/UiUser.cs @@ -38,8 +38,6 @@ public class UiUser [JsonProperty("ldap_connection"), JsonPropertyName("ldap_connection")] public UiLdapConnection LdapConnection { get; set;} = new UiLdapConnection(); - public string DefaultRole { get; set; } = ""; - public List Roles { get; set; } = new List(); public string Jwt { get; set; } = ""; diff --git a/roles/lib/files/FWO.Api.Client/Data/UnusedFilter.cs b/roles/lib/files/FWO.Api.Client/Data/UnusedFilter.cs new file mode 100644 index 000000000..83d65ed9b --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/Data/UnusedFilter.cs @@ -0,0 +1,8 @@ +namespace FWO.Api.Data +{ + public class UnusedFilter + { + public int UnusedForDays = int.MaxValue; + public int CreationTolerance = 0; + } +} diff --git a/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs b/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs index 1a4dd93e5..c8685a538 100644 --- a/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs +++ b/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs @@ -23,6 +23,7 @@ public class GraphQlApiConnection : ApiConnection private GraphQLHttpClient graphQlClient; private string? jwt; + private string prevRole = ""; private void Initialize(string ApiServerUri) { @@ -72,6 +73,39 @@ public override void SetRole(string role) graphQlClient.HttpClient.DefaultRequestHeaders.Add("x-hasura-role", role); } + public override void SetProperRole(System.Security.Claims.ClaimsPrincipal user, List targetRoleList) + { + try + { + prevRole = graphQlClient.HttpClient.DefaultRequestHeaders.GetValues("x-hasura-role")?.First() ?? 
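With dev_id now part of the rule fragments and mapped to Rule.DeviceId above, the owning gateway is available directly on deserialized rules. A small illustrative use; the helper name and the surrounding call are assumptions, not part of this change.

```csharp
using System.Collections.Generic;
using System.Linq;
using FWO.Api.Data;

public static class RulesPerDeviceDemo
{
    // Counts rules per gateway from an already fetched rule list.
    public static Dictionary<int, int> CountRulesPerDevice(IEnumerable<Rule> rules)
    {
        return rules.GroupBy(rule => rule.DeviceId)
                    .ToDictionary(group => group.Key, group => group.Count());
    }
}
```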
""; + } + catch(Exception){} + + // first look if user is already in one of the target roles + foreach(string role in targetRoleList) + { + if (user.IsInRole(role)) + { + SetRole(role); + return; + } + } + // now look if user has a target role as allowed role + foreach(string role in targetRoleList) + { + if(user.Claims.FirstOrDefault(claim => claim.Type == "x-hasura-allowed-roles" && claim.Value == role) != null) + { + SetRole(role); + return; + } + } + } + + public override void SwitchBack() + { + SetRole(prevRole); + } + /// /// Sends an APICall (query, mutation) /// NB: SendQueryAsync always returns an array of objects (even if the result is a single element) @@ -141,12 +175,12 @@ public override async Task SendQueryAsync( } } - public override ApiSubscription GetSubscription(Action exceptionHandler, ApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null) + public override GraphQlApiSubscription GetSubscription(Action exceptionHandler, GraphQlApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null) { try { GraphQLRequest request = new GraphQLRequest(subscription, variables, operationName); - return new ApiSubscription(this, graphQlClient, request, exceptionHandler, subscriptionUpdateHandler); + return new GraphQlApiSubscription(this, graphQlClient, request, exceptionHandler, subscriptionUpdateHandler); } catch (Exception exception) { @@ -154,5 +188,13 @@ public override ApiSubscription GetSubscription : ApiSubscription, IDisposable + { + public delegate void SubscriptionUpdate(SubscriptionResponseType reponse); + public event SubscriptionUpdate OnUpdate; + + private IObservable> subscriptionStream; + private IDisposable subscription; + private readonly GraphQLHttpClient graphQlClient; + private readonly GraphQLRequest request; + private readonly Action internalExceptionHandler; + + public GraphQlApiSubscription(ApiConnection apiConnection, GraphQLHttpClient graphQlClient, GraphQLRequest request, Action exceptionHandler, SubscriptionUpdate OnUpdate) + { + this.OnUpdate = OnUpdate; + this.graphQlClient = graphQlClient; + this.request = request; + + // handle subscription terminating exceptions + internalExceptionHandler = (Exception exception) => + { + // Case: Jwt expired + if (exception.Message.Contains("JWTExpired")) + { + // Quit subscription by throwing exception. + // This does NOT lead to a real thrown exception within the application but is instead handled by the graphql library + throw exception; + } + exceptionHandler(exception); + }; + + CreateSubscription(); + + apiConnection.OnAuthHeaderChanged += ApiConnectionOnAuthHeaderChanged; + } + + private void CreateSubscription() + { + Log.WriteDebug("API", $"Creating API subscription {request.OperationName}."); + subscriptionStream = graphQlClient.CreateSubscriptionStream(request, internalExceptionHandler); + Log.WriteDebug("API", "API subscription created."); + + subscription = subscriptionStream.Subscribe(response => + { + if (ApiConstants.UseSystemTextJsonSerializer) + { + JsonElement.ObjectEnumerator responseObjectEnumerator = response.Data.EnumerateObject(); + responseObjectEnumerator.MoveNext(); + SubscriptionResponseType returnValue = JsonSerializer.Deserialize(responseObjectEnumerator.Current.Value.GetRawText()) ?? 
+ throw new Exception($"Could not convert result from Json to {nameof(SubscriptionResponseType)}.\nJson: {responseObjectEnumerator.Current.Value.GetRawText()}"); ; + OnUpdate(returnValue); + } + else + { + try + { + // If repsonse.Data == null -> Jwt expired - connection was closed + // Leads to this method getting called again + if (response.Data == null) + { + // Terminate subscription + subscription.Dispose(); + } + else + { + JObject data = (JObject)response.Data; + JProperty prop = (JProperty)(data.First ?? throw new Exception($"Could not retrieve unique result attribute from Json.\nJson: {response.Data}")); + JToken result = prop.Value; + SubscriptionResponseType returnValue = result.ToObject() ?? throw new Exception($"Could not convert result from Json to {typeof(SubscriptionResponseType)}.\nJson: {response.Data}"); + OnUpdate(returnValue); + } + } + catch (Exception ex) + { + Log.WriteError("GraphQL Subscription", "Subscription lead to exception", ex); + throw; + } + } + }); + } + + private void ApiConnectionOnAuthHeaderChanged(object? sender, string jwt) + { + subscription.Dispose(); + CreateSubscription(); + } + + protected override void Dispose(bool disposing) + { + if (disposing) + { + subscription.Dispose(); + } + } + } +} diff --git a/roles/lib/files/FWO.Api.Client/JsonCustomConverters.cs b/roles/lib/files/FWO.Api.Client/JsonCustomConverters.cs new file mode 100644 index 000000000..06dd7624f --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/JsonCustomConverters.cs @@ -0,0 +1,82 @@ +using NetTools; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Net; +using System.Text; +using System.Text.Json.Nodes; +using System.Threading.Tasks; + +namespace FWO.Api.Client +{ + public class WrapperConverter : JsonConverter + { + private readonly string wrappedObjectName = ""; + + public WrapperConverter(string wrappedObjectName) + { + this.wrappedObjectName = wrappedObjectName; + } + + public override bool CanConvert(Type objectType) => typeof(ValueType).IsAssignableFrom(objectType); + + public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer) + { + // Load the JSON as a JObject + JObject jsonObject = JObject.Load(reader); + + // Check if the "wrappedObjectName" property exists + if (jsonObject.TryGetValue(wrappedObjectName, out JToken? wrappedObjectToken)) + { + // Deserialize the wrapped object + return wrappedObjectToken.ToObject(serializer); + } + + // Deserialize the wrapper object otherwise + return jsonObject.ToObject(serializer); + } + + public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer) + { + // Wrap the object with a property named "wrappedObjectName" + JObject jsonObject = new JObject + { + { wrappedObjectName, value == null ? null : JToken.FromObject(value, serializer) } + }; + + // Write the JSON + jsonObject.WriteTo(writer); + } + } + + public class IpAddressRangeJsonTypeConverter : JsonConverter + { + public override IPAddressRange ReadJson(JsonReader reader, Type objectType, IPAddressRange? existingValue, bool hasExistingValue, JsonSerializer serializer) + { + // Load the JSON as a JObject + JObject jsonObject = JObject.Load(reader); + // Deserialize the IP address range based on the properties ip_range_start and ip_range_end + IPAddress start = IPAddress.Parse((jsonObject.GetValue("ip_range_start")?.ToObject() ?? 
throw new ArgumentNullException("ip_range_start")).Replace("/32", "")); IPAddress end = IPAddress.Parse((jsonObject.GetValue("ip_range_end")?.ToObject() ?? throw new ArgumentNullException("ip_range_end")).Replace("/32", "")); return new IPAddressRange(start, end); } + + public override void WriteJson(JsonWriter writer, IPAddressRange? value, JsonSerializer serializer) + { + if (value != null) + { + // Create a JSON JObject + JObject result = new JObject + { + { "ip_range_start", value.Begin.ToString() }, + { "ip_range_end", value.End.ToString() } + }; + + result.WriteTo(writer); + } + } + } +} diff --git a/roles/lib/files/FWO.Api.Client/JsonStringConverter.cs b/roles/lib/files/FWO.Api.Client/JsonStringConverter.cs deleted file mode 100644 index 06d64253c..000000000 --- a/roles/lib/files/FWO.Api.Client/JsonStringConverter.cs +++ /dev/null @@ -1,79 +0,0 @@ -using FWO.Api.Data; -using Newtonsoft.Json; -using Newtonsoft.Json.Serialization; -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - -namespace FWO.Api.Client -{ - public class JsonStringConverter : TypeConverter - { - public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) - { - return sourceType == typeof(string) || base.CanConvertFrom(context, sourceType); - } - - public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) - { - if (value is string stringValue) - { - return JsonConvert.DeserializeObject(stringValue); - } - else - { - return base.ConvertFrom(context, culture, value); - } - } - - public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) - { - if (destinationType == null || destinationType == typeof(string)) - { - return JsonConvert.SerializeObject(value); - } - else - { - return base.ConvertTo(context, culture, value, destinationType); - } - } - } - - public class NoTypeConverterJsonConverter : JsonConverter - { - static readonly IContractResolver resolver = new NoTypeConverterContractResolver(); - - class NoTypeConverterContractResolver : DefaultContractResolver - { - protected override JsonContract CreateContract(Type objectType) - { - if (typeof(T).IsAssignableFrom(objectType)) - { - var contract = this.CreateObjectContract(objectType); - contract.Converter = null; // Also null out the converter to prevent infinite recursion. - return contract; - } - return base.CreateContract(objectType); - } - } - - public override bool CanConvert(Type objectType) - { - return typeof(T).IsAssignableFrom(objectType); - } - - public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer) - { - return JsonSerializer.CreateDefault(new JsonSerializerSettings { ContractResolver = resolver }).Deserialize(reader, objectType); - } - - public override void WriteJson(JsonWriter writer, object?
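With Begin written as ip_range_start and End as ip_range_end, the converter above round-trips a NetTools.IPAddressRange. A minimal sketch; registering the converter explicitly on the JsonConvert call is an assumption about how it would be exercised outside the attribute-based wiring.

```csharp
using System;
using System.Net;
using NetTools;
using Newtonsoft.Json;
using FWO.Api.Client;

public static class IpRangeConverterDemo
{
    public static void Main()
    {
        var range = new IPAddressRange(IPAddress.Parse("10.0.0.1"), IPAddress.Parse("10.0.0.254"));

        string json = JsonConvert.SerializeObject(range, new IpAddressRangeJsonTypeConverter());
        Console.WriteLine(json); // {"ip_range_start":"10.0.0.1","ip_range_end":"10.0.0.254"}

        IPAddressRange back = JsonConvert.DeserializeObject<IPAddressRange>(
            json, new IpAddressRangeJsonTypeConverter())!;
        Console.WriteLine($"{back.Begin} - {back.End}"); // 10.0.0.1 - 10.0.0.254
    }
}
```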
value, JsonSerializer serializer) - { - JsonSerializer.CreateDefault(new JsonSerializerSettings { ContractResolver = resolver }).Serialize(writer, value); - } - } -} diff --git a/roles/lib/files/FWO.Api.Client/Queries/ComplianceQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/ComplianceQueries.cs new file mode 100644 index 000000000..a31b87581 --- /dev/null +++ b/roles/lib/files/FWO.Api.Client/Queries/ComplianceQueries.cs @@ -0,0 +1,30 @@ +using FWO.Logging; + +namespace FWO.Api.Client.Queries +{ + public class ComplianceQueries : Queries + { + public static readonly string addNetworkZone; + public static readonly string deleteNetworkZone; + public static readonly string getNetworkZones; + public static readonly string updateNetworkZones; + public static readonly string modifyNetworkZoneCommunication; + + static ComplianceQueries() + { + try + { + addNetworkZone = File.ReadAllText(QueryPath + "compliance/addNetworkZone.graphql"); + deleteNetworkZone = File.ReadAllText(QueryPath + "compliance/deleteNetworkZone.graphql"); + getNetworkZones = File.ReadAllText(QueryPath + "compliance/getNetworkZones.graphql"); + updateNetworkZones = File.ReadAllText(QueryPath + "compliance/updateNetworkZone.graphql"); + modifyNetworkZoneCommunication = File.ReadAllText(QueryPath + "compliance/updateNetworkZoneCommunication.graphql"); + } + catch (Exception exception) + { + Log.WriteError("Initialize Compliance Queries", "Api compliance queries could not be loaded.", exception); + Environment.Exit(-1); + } + } + } +} diff --git a/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs index f84d43478..5eafa3d52 100644 --- a/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs +++ b/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs @@ -22,7 +22,6 @@ public class DeviceQueries : Queries public static readonly string updateDevice; public static readonly string changeDeviceState; public static readonly string deleteDevice; - public static readonly string getImportStatus; public static readonly string deleteImport; public static readonly string getCredentials; public static readonly string getCredentialsWithoutSecrets; @@ -58,9 +57,8 @@ static DeviceQueries() updateDevice = File.ReadAllText(QueryPath + "device/updateDevice.graphql"); changeDeviceState = File.ReadAllText(QueryPath + "device/changeDeviceState.graphql"); deleteDevice = File.ReadAllText(QueryPath + "device/deleteDevice.graphql"); - getImportStatus = File.ReadAllText(QueryPath + "device/getImportStatus.graphql"); deleteImport = File.ReadAllText(QueryPath + "device/deleteImport.graphql"); - + getCredentials = File.ReadAllText(QueryPath + "device/getCredentials.graphql") + " " + File.ReadAllText(QueryPath + "device/fragments/importCredentials.graphql"); getCredentialsWithoutSecrets = File.ReadAllText(QueryPath + "device/getCredentialsWithoutSecrets.graphql") + " " diff --git a/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs index fc8631815..debe8d00e 100644 --- a/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs +++ b/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs @@ -8,6 +8,7 @@ public class MonitorQueries : Queries public static readonly string getLogEntrys; public static readonly string addUiLogEntry; public static readonly string getUiLogEntrys; + public static readonly string getAllUiLogEntrys; public static readonly string getImportLogEntrys; public static readonly string addAlert; public static 
readonly string getOpenAlerts; @@ -18,6 +19,7 @@ public class MonitorQueries : Queries public static readonly string addAutodiscoveryLogEntry; public static readonly string getAutodiscoveryLogEntrys; public static readonly string getDailyCheckLogEntrys; + public static readonly string getImportStatus; static MonitorQueries() @@ -29,6 +31,7 @@ static MonitorQueries() addUiLogEntry = File.ReadAllText(QueryPath + "monitor/addUiLogEntry.graphql"); getUiLogEntrys = File.ReadAllText(QueryPath + "monitor/getUiLogEntrys.graphql"); + getAllUiLogEntrys = File.ReadAllText(QueryPath + "monitor/getAllUiLogEntrys.graphql"); getImportLogEntrys = File.ReadAllText(QueryPath + "monitor/getImportLogEntrys.graphql"); @@ -39,6 +42,8 @@ static MonitorQueries() acknowledgeAlert = File.ReadAllText(QueryPath + "monitor/acknowledgeAlert.graphql"); subscribeAlertChanges = File.ReadAllText(QueryPath + "monitor/subscribeAlertChanges.graphql"); + getImportStatus = File.ReadAllText(QueryPath + "monitor/getImportStatus.graphql"); + addAutodiscoveryLogEntry = File.ReadAllText(QueryPath + "monitor/addAutodiscoveryLogEntry.graphql"); getAutodiscoveryLogEntrys = File.ReadAllText(QueryPath + "monitor/getAutodiscoveryLogEntrys.graphql"); getDailyCheckLogEntrys = File.ReadAllText(QueryPath + "monitor/getDailyCheckLogEntrys.graphql"); diff --git a/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs index fa649d4fe..c222c3986 100644 --- a/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs +++ b/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs @@ -28,11 +28,14 @@ public class ReportQueries : Queries public static readonly string getRelevantImportIdsAtTime; public static readonly string statisticsReportCurrent; + public static readonly string subscribeGeneratedReportsChanges; public static readonly string getGeneratedReport; public static readonly string getGeneratedReports; public static readonly string deleteGeneratedReport; public static readonly string addGeneratedReport; + public static readonly string getUsageDataCount; + static ReportQueries() { try @@ -53,10 +56,12 @@ static ReportQueries() editReportTemplate = File.ReadAllText(QueryPath + "report/editReportTemplate.graphql"); deleteReportTemplate = File.ReadAllText(QueryPath + "report/deleteReportTemplate.graphql"); subscribeReportScheduleChanges = File.ReadAllText(QueryPath + "report/subscribeReportScheduleChanges.graphql"); + subscribeGeneratedReportsChanges = File.ReadAllText(QueryPath + "report/subscribeGeneratedReportsChanges.graphql"); getGeneratedReports = File.ReadAllText(QueryPath + "report/getGeneratedReports.graphql"); getGeneratedReport = File.ReadAllText(QueryPath + "report/getGeneratedReport.graphql"); deleteGeneratedReport = File.ReadAllText(QueryPath + "report/deleteGeneratedReport.graphql"); addGeneratedReport = File.ReadAllText(QueryPath + "report/addGeneratedReport.graphql"); + getUsageDataCount = File.ReadAllText(QueryPath + "report/getUsageDataCount.graphql"); } catch (Exception exception) { diff --git a/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs b/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs index 923d6cbc5..c818d0415 100644 --- a/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs +++ b/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs @@ -34,6 +34,12 @@ public class ConfigData : ICloneable [JsonProperty("autoFillRightSidebar"), JsonPropertyName("autoFillRightSidebar")] public bool AutoFillRightSidebar { get; set; } = false; + [JsonProperty("unusedTolerance"), 
JsonPropertyName("unusedTolerance")] + public int UnusedTolerance { get; set; } = 400; + + [JsonProperty("creationTolerance"), JsonPropertyName("creationTolerance")] + public int CreationTolerance { get; set; } = 90; + [JsonProperty("dataRetentionTime"), JsonPropertyName("dataRetentionTime")] public int DataRetentionTime { get; set; } = 731; @@ -55,6 +61,19 @@ public class ConfigData : ICloneable [JsonProperty("fwApiElementsPerFetch"), JsonPropertyName("fwApiElementsPerFetch")] public int FwApiElementsPerFetch { get; set; } = 150; + [JsonProperty("impChangeNotifyRecipients"), JsonPropertyName("impChangeNotifyRecipients")] + public string ImpChangeNotifyRecipients { get; set; } = ""; + + [JsonProperty("impChangeNotifySubject"), JsonPropertyName("impChangeNotifySubject")] + public string ImpChangeNotifySubject { get; set; } = ""; + + [JsonProperty("impChangeNotifyBody"), JsonPropertyName("impChangeNotifyBody")] + public string ImpChangeNotifyBody { get; set; } = ""; + + [JsonProperty("impChangeNotifyActive"), JsonPropertyName("impChangeNotifyActive")] + public bool ImpChangeNotifyActive { get; set; } = false; + + [JsonProperty("recertificationPeriod"), JsonPropertyName("recertificationPeriod")] public int RecertificationPeriod { get; set; } = 365; @@ -131,7 +150,7 @@ public class ConfigData : ICloneable public string EmailServerAddress { get; set; } = ""; [JsonProperty("emailPort"), JsonPropertyName("emailPort")] - public int EmailPort { get; set; } = 25; + public int EmailPort { get; set; } [JsonProperty("emailTls"), JsonPropertyName("emailTls")] public EmailEncryptionMethod EmailTls { get; set; } = EmailEncryptionMethod.None; diff --git a/roles/lib/files/FWO.Config.Api/UserConfig.cs b/roles/lib/files/FWO.Config.Api/UserConfig.cs index fc9807fde..16fd11830 100644 --- a/roles/lib/files/FWO.Config.Api/UserConfig.cs +++ b/roles/lib/files/FWO.Config.Api/UserConfig.cs @@ -136,7 +136,7 @@ public override string GetText(string key) } } - public string Convert(string rawText) + private string Convert(string rawText) { string plainText = System.Web.HttpUtility.HtmlDecode(rawText); @@ -153,7 +153,7 @@ public string Convert(string rawText) while (cont) { begin = plainText.IndexOf(startLink, index); - if (begin > 0) + if (begin >= 0) { end = plainText.IndexOf("\"", begin + startLink.Length); if (end > 0) diff --git a/roles/lib/files/FWO.Logging/Log.cs b/roles/lib/files/FWO.Logging/Log.cs index 7001a7284..16fc7da66 100644 --- a/roles/lib/files/FWO.Logging/Log.cs +++ b/roles/lib/files/FWO.Logging/Log.cs @@ -1,6 +1,4 @@ -using System; -using System.Diagnostics; -using System.Linq; +using System.Diagnostics; using System.Reflection; using System.Runtime.CompilerServices; @@ -17,61 +15,62 @@ static Log() Task.Factory.StartNew(async () => { // log switch - log file locking - bool logOwned = false; + bool logOwnedByExternal = false; Stopwatch stopwatch = new Stopwatch(); while (true) { try { + // Open file using FileStream file = await GetFile(lockFilePath); - // read file content + // Read file content using StreamReader reader = new StreamReader(file); string lockFileContent = (await reader.ReadToEndAsync()).Trim(); - // REQUESTED - lock was requested by log swap process + // Forcefully release lock after timeout + if (logOwnedByExternal && stopwatch.ElapsedMilliseconds > 10_000) + { + using StreamWriter writer = new StreamWriter(file); + await writer.WriteLineAsync("FORCEFULLY RELEASED"); + stopwatch.Reset(); + semaphore.Release(); + logOwnedByExternal = false; + } // GRANTED - lock was granted by us - // 
RELEASED - lock was released by log swap process - if (lockFileContent.EndsWith("GRANTED")) + else if (lockFileContent.EndsWith("GRANTED")) { // Request lock if it is not already requested by us // (in case of restart with log already granted) - if (!logOwned) + if (!logOwnedByExternal) { semaphore.Wait(); stopwatch.Restart(); - logOwned = true; - } - // Forcefully release lock after timeout - else if (stopwatch.ElapsedMilliseconds > 10 * 1000) - { - using StreamWriter writer = new StreamWriter(file); - await writer.WriteLineAsync("FORCEFULLY RELEASED"); - stopwatch.Reset(); - semaphore.Release(); - logOwned = false; + logOwnedByExternal = true; } } - if (lockFileContent.EndsWith("REQUESTED")) + // REQUESTED - lock was requested by log swap process + else if (lockFileContent.EndsWith("REQUESTED")) { // only request lock if it is not already requested by us - if (!logOwned) + if (!logOwnedByExternal) { semaphore.Wait(); stopwatch.Restart(); - logOwned = true; + logOwnedByExternal = true; } using StreamWriter writer = new StreamWriter(file); await writer.WriteLineAsync("GRANTED"); } - if (lockFileContent.EndsWith("RELEASED")) + // RELEASED - lock was released by log swap process + else if (lockFileContent.EndsWith("RELEASED")) { // only release lock if it was formerly requested by us - if (logOwned) + if (logOwnedByExternal) { stopwatch.Reset(); semaphore.Release(); - logOwned = false; + logOwnedByExternal = false; } } } @@ -116,12 +115,12 @@ public static void WriteWarning(string Title, string Text, [CallerMemberName] st WriteLog("Warning", Title, Text, callerName, callerFile, callerLineNumber, ConsoleColor.DarkYellow); } - public static void WriteError(string Title, string? Text = null, Exception? Error = null, [CallerMemberName] string callerName = "", [CallerFilePath] string callerFile = "", [CallerLineNumber] int callerLineNumber = 0) + public static void WriteError(string Title, string? Text = null, Exception? Error = null, string? User = null, string? Role = null, [CallerMemberName] string callerName = "", [CallerFilePath] string callerFile = "", [CallerLineNumber] int callerLineNumber = 0) { string DisplayText = - (Text != null ? - $"{Text}" - : "") + + (User != null ? $"User: {User}, " : "") + + (Role != null ? $"Role: {Role}, " : "") + + (Text != null ? $"{Text}" : "") + (Error != null ? "\n ---\n" + $"Exception thrown: \n {Error?.GetType().Name} \n" + diff --git a/roles/lib/files/FWO.Mail/MailerMailKit.cs b/roles/lib/files/FWO.Mail/MailerMailKit.cs index abcbcecfc..ac5f5852d 100644 --- a/roles/lib/files/FWO.Mail/MailerMailKit.cs +++ b/roles/lib/files/FWO.Mail/MailerMailKit.cs @@ -28,7 +28,17 @@ public class MailData public string? Body { get; } - public MailData(List to, string subject, string? body = null, string? from = null, string? displayName = null, string? replyTo = null, string? replyToName = null, List? bcc = null, List? cc = null) + public MailData( + List to, + string subject, + string? body = null, + string? from = null, + string? displayName = null, + string? replyTo = null, + string? replyToName = null, + List? bcc = null, + List? cc = null + ) { // Receiver To = to; @@ -40,7 +50,7 @@ public MailData(List to, string subject, string? body = null, string? 
fr DisplayName = displayName; ReplyTo = replyTo; ReplyToName = replyToName; - + // Content Subject = subject; Body = body; @@ -61,7 +71,12 @@ public MailKitMailer(EmailConnection emailConn) EmailConn = emailConn; } - public async Task SendAsync(MailData mailData, EmailConnection emailConn, CancellationToken ct = default, bool mailFormatHtml = false) + public async Task SendAsync( + MailData mailData, + EmailConnection emailConn, + CancellationToken ct = default, + bool mailFormatHtml = false + ) { try { @@ -83,7 +98,7 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn, mail.To.Add(MailboxAddress.Parse(mailAddress)); // Set Reply to if specified in mail data - if(!string.IsNullOrEmpty(mailData.ReplyTo)) + if (!string.IsNullOrEmpty(mailData.ReplyTo)) mail.ReplyTo.Add(new MailboxAddress(mailData.ReplyToName, mailData.ReplyTo)); // BCC @@ -91,7 +106,9 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn, if (mailData.Bcc != null) { // Get only addresses where value is not null or with whitespace. x = value of address - foreach (string mailAddress in mailData.Bcc.Where(x => !string.IsNullOrWhiteSpace(x))) + foreach ( + string mailAddress in mailData.Bcc.Where(x => !string.IsNullOrWhiteSpace(x)) + ) mail.Bcc.Add(MailboxAddress.Parse(mailAddress.Trim())); } @@ -99,7 +116,9 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn, // Check if a CC address was supplied in the request if (mailData.Cc != null) { - foreach (string mailAddress in mailData.Cc.Where(x => !string.IsNullOrWhiteSpace(x))) + foreach ( + string mailAddress in mailData.Cc.Where(x => !string.IsNullOrWhiteSpace(x)) + ) mail.Cc.Add(MailboxAddress.Parse(mailAddress.Trim())); } #endregion @@ -125,13 +144,30 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn, switch (emailConn.Encryption) { case EmailEncryptionMethod.None: - await smtp.ConnectAsync(emailConn.ServerAddress, emailConn.Port, SecureSocketOptions.None, ct); + await smtp.ConnectAsync( + emailConn.ServerAddress, + emailConn.Port, + SecureSocketOptions.None, + ct + ); break; case EmailEncryptionMethod.StartTls: - await smtp.ConnectAsync(emailConn.ServerAddress, emailConn.Port, SecureSocketOptions.StartTls, ct); + smtp.ServerCertificateValidationCallback = (s, c, h, e) => true; //accept all SSL certificates + await smtp.ConnectAsync( + emailConn.ServerAddress, + emailConn.Port, + SecureSocketOptions.StartTls, + ct + ); break; case EmailEncryptionMethod.Tls: - await smtp.ConnectAsync(emailConn.ServerAddress, emailConn.Port, SecureSocketOptions.SslOnConnect, ct); + smtp.ServerCertificateValidationCallback = (s, c, h, e) => true; //accept all SSL certificates + await smtp.ConnectAsync( + emailConn.ServerAddress, + emailConn.Port, + SecureSocketOptions.SslOnConnect, + ct + ); break; } if (emailConn.User != null && emailConn.User != "") @@ -140,11 +176,10 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn, } await smtp.SendAsync(mail, ct); await smtp.DisconnectAsync(true, ct); - + #endregion return true; - } catch (Exception) { diff --git a/roles/lib/files/FWO.Middleware.Client/JwtReader.cs b/roles/lib/files/FWO.Middleware.Client/JwtReader.cs index 70e63f6cc..c3d9b5252 100644 --- a/roles/lib/files/FWO.Middleware.Client/JwtReader.cs +++ b/roles/lib/files/FWO.Middleware.Client/JwtReader.cs @@ -122,5 +122,12 @@ public TimeSpan TimeUntilExpiry() return jwt.ValidTo - DateTime.UtcNow; } + + public string GetRole() + { + if (jwt == null) + throw new 
ArgumentNullException(nameof(jwt), "Jwt was not validated yet."); + return jwt.Claims.FirstOrDefault(claim => claim.Type == "role")?.Value ?? ""; + } } } diff --git a/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs b/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs index 8794475c0..08ed76d5c 100644 --- a/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs +++ b/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs @@ -10,8 +10,9 @@ namespace FWO.Middleware.Client { - public class MiddlewareClient + public class MiddlewareClient : IDisposable { + private bool disposed = false; private RestClient restClient; readonly string middlewareServerUri; @@ -241,5 +242,26 @@ public async Task> DeleteTenant(TenantDeleteParameters parame request.AddJsonBody(parameters); return await restClient.ExecuteAsync(request); } + + protected virtual void Dispose(bool disposing) + { + if (disposed) return; + if (disposing) + { + restClient.Dispose(); + disposed = true; + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + ~ MiddlewareClient() + { + Dispose(false); + } } } diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs index 6fe55e0fb..8e989e6d2 100644 --- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs +++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs @@ -1,6 +1,3 @@ -using System.ComponentModel; -using System.Net; -using FWO.Logging; using FWO.Report.Filter.Exceptions; namespace FWO.Report.Filter.Ast @@ -89,104 +86,5 @@ protected string AddVariable(DynGraphqlQuery query, string name, TokenKind public abstract void ConvertToSemanticType(); - //public void ConvertToSemanticType() - //{ - // TypeConverter converter = TypeDescriptor.GetConverter(this.GetType()); - // if (converter.CanConvertFrom(this.GetType())) - // { - // try - // { - // object convertedValue = converter.ConvertFrom(this) ?? throw new NullReferenceException("Error while converting: converted value is null"); - // SemanticValue = (SemanticType)convertedValue ?? 
throw new NullReferenceException($"Error while converting: value could not be converted to semantic type: {typeof(SemanticType)}"); - // } - // catch (SemanticException) - // { - // throw; - // } - // catch (Exception ex) - // { - // throw new SemanticException($"Filter could not be converted to expected semantic type {typeof(SemanticType)}: {ex.Message}", Value.Position); - // } - // } - // else - // { - // throw new NotSupportedException($"Internal error: TypeConverter does not support conversion from {this.GetType()} to {typeof(SemanticType)}"); - // } - //} - - //public override void Extract(ref DynGraphqlQuery query) - //{ - // switch (Name.Kind) - // - - // // "xy" and "FullText=xy" are the same filter - // case TokenKind.FullText: - // case TokenKind.Value: - // ExtractFullTextFilter(query); - // break; - // case TokenKind.ReportType: - // ExtractReportTypeFilter(query); - // break; - // case TokenKind.Source: - // ExtractSourceFilter(query); - // break; - // case TokenKind.Destination: - // ExtractDestinationFilter(query); - // break; - // case TokenKind.Action: - // ExtractActionFilter(query); - // break; - // case TokenKind.Service: - // ExtractServiceFilter(query); - // break; - // case TokenKind.DestinationPort: - // ExtractDestinationPortFilter(query); - // break; - // case TokenKind.Protocol: - // ExtractProtocolFilter(query); - // break; - // case TokenKind.Management: - // ExtractManagementFilter(query); - // break; - // case TokenKind.Gateway: - // ExtractGatewayFilter(query); - // break; - // case TokenKind.Remove: - // ExtractRemoveFilter(query); - // break; - // case TokenKind.RecertDisplay: - // ExtractRecertDisplayFilter(query); //, (int)(SemanticValue as int?)!); - // break; - // case TokenKind.Time: - // ExtractTimeFilter(query); - // break; - // default: - // throw new NotSupportedException($"### Compiler Error: Found unexpected and unsupported filter token: \"{Name}\" ###"); - // } - //} - - //private static string SetQueryOpString(Token @operator, Token filter, string value) - //{ - // string operation; - // switch (@operator.Kind) - // { - // case TokenKind.EQ: - // if (filter.Kind == TokenKind.Time || filter.Kind == TokenKind.DestinationPort) - // operation = "_eq"; - // else if ((filter.Kind == TokenKind.Source && IsCidr(value)) || filter.Kind == TokenKind.DestinationPort) - // operation = "_eq"; - // else if (filter.Kind == TokenKind.Management && int.TryParse(value, out int _)) - // operation = "_eq"; - // else - // operation = "_ilike"; - // break; - // case TokenKind.NEQ: - // operation = "_nilike"; - // break; - // default: - // throw new Exception("### Parser Error: Expected Operator Token (and thought there is one) ###"); - // } - // return operation; - //} } } diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs index 201b3082a..0de31b27e 100644 --- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs +++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs @@ -1,10 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - -namespace FWO.Report.Filter.Ast +namespace FWO.Report.Filter.Ast { internal class AstNodeFilterDateTimeRange : AstNodeFilter { @@ -66,118 +60,5 @@ private DynGraphqlQuery ExtractLastHitFilter(DynGraphqlQuery query, ReportType r } return query; } - - //private DynGraphqlQuery ExtractTimeFilter(DynGraphqlQuery query) - //{ - // 
switch (query.ReportType) - // { - // case ReportType.Rules: - // case ReportType.Statistics: - // case ReportType.NatRules: - // switch (Operator.Kind) - // { - // case TokenKind.EQ: - // case TokenKind.EEQ: - // query.ruleWhereStatement += - // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " + - // $"importControlByRuleLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}"; - // query.nwObjWhereStatement += - // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " + - // $"importControlByObjLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}"; - // query.svcObjWhereStatement += - // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " + - // $"importControlBySvcLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}"; - // query.userObjWhereStatement += - // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " + - // $"importControlByUserLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}"; - // query.ReportTime = Value.Text; - // break; - // default: - // throw new SemanticException($"Unexpected operator token. Expected equals token.", Operator.Position); - // } - // break; - // case ReportType.Changes: - // switch (Operator.Kind) - // { - // case TokenKind.EQ: - // case TokenKind.EEQ: - // case TokenKind.GRT: - // case TokenKind.LSS: - // (string start, string stop) = ResolveTimeRange(Value.Text); - // query.QueryVariables["start"] = start; - // query.QueryVariables["stop"] = stop; - // query.QueryParameters.Add("$start: timestamp! "); - // query.QueryParameters.Add("$stop: timestamp! "); - - // query.ruleWhereStatement += $@" - // _and: [ - // {{ import_control: {{ stop_time: {{ _gte: $start }} }} }} - // {{ import_control: {{ stop_time: {{ _lte: $stop }} }} }} - // ] - // change_type_id: {{ _eq: 3 }} - // security_relevant: {{ _eq: true }}"; - // break; - // default: - // throw new SemanticException($"Unexpected operator token.", Operator.Position); - // } - // break; - // default: - // Log.WriteError("Filter", $"Unexpected report type found: {query.ReportType}"); - // break; - // } - // // todo: deal with time ranges for changes report type - // return query; - //} - - //private (string, string) ResolveTimeRange(string timeRange) - //{ - // string start; - // string stop; - // //string currentTime = (string)DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss"); - // string currentYear = (string)DateTime.Now.ToString("yyyy"); - // string currentMonth = (string)DateTime.Now.ToString("MM"); - // string currentDay = (string)DateTime.Now.ToString("dd"); - // DateTime startOfCurrentMonth = new DateTime(Convert.ToInt16(currentYear), Convert.ToInt16(currentMonth), 1); - // DateTime startOfNextMonth = startOfCurrentMonth.AddMonths(1); - // DateTime startOfPrevMonth = startOfCurrentMonth.AddMonths(-1); - - // switch (timeRange) - // { - // // todo: add today, yesterday, this week, last week - // case "last year": - // start = $"{(Convert.ToInt16(currentYear) - 1)}-01-01"; - // stop = $"{Convert.ToInt16(currentYear)}-01-01"; - // break; - // case "this year": - // start = $"{Convert.ToInt16(currentYear)}-01-01"; - // stop = $"{Convert.ToInt16(currentYear) + 1}-01-01"; - // break; - // case "this month": - // start = startOfCurrentMonth.ToString("yyyy-MM-dd"); - // stop = startOfNextMonth.ToString("yyyy-MM-dd"); - // break; - // case "last month": - // start = startOfPrevMonth.ToString("yyyy-MM-dd"); - // stop = startOfCurrentMonth.ToString("yyyy-MM-dd"); - // break; - // default: - // string[] times = timeRange.Split('/'); - 
// if (times.Length == 2) - // { - // start = Convert.ToDateTime(times[0]).ToString("yyyy-MM-dd HH:mm:ss"); - // if (times[1].Trim().Length < 11) - // { - // times[1] += " 23:59:59"; - // } - // stop = Convert.ToDateTime(times[1]).ToString("yyyy-MM-dd HH:mm:ss"); - // } - // else - // throw new SyntaxException($"Error: wrong time range format.", Value.Position); // Unexpected token - // // we have some hard coded string positions here which we should get rid off - // // how can we access the tokens[position].Position information here? - // break; - // } - // return (start, stop); - //} } } diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs index 9e2180a12..2e5611e3d 100644 --- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs +++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs @@ -1,9 +1,4 @@ using FWO.Report.Filter.Exceptions; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; namespace FWO.Report.Filter.Ast { @@ -39,6 +34,9 @@ public override void Extract(ref DynGraphqlQuery query, ReportType? reportType) case TokenKind.Owner: ExtractOwnerFilter(query); break; + case TokenKind.Unused: + ExtractUnusedFilter(query); + break; default: break; } @@ -65,6 +63,15 @@ private DynGraphqlQuery ExtractOwnerFilter(DynGraphqlQuery query) query.ruleWhereStatement += $"owner: {{ {ExtractOperator()}: ${QueryVarName} }}"; return query; } - + + private DynGraphqlQuery ExtractUnusedFilter(DynGraphqlQuery query) + { + string QueryVarName = AddVariable(query, "cut", Operator.Kind, DateTime.Now.AddDays(-semanticValue)); + query.ruleWhereStatement += $@"rule_metadatum: {{_or: [ + {{_and: [{{rule_last_hit: {{_is_null: false}} }}, {{rule_last_hit: {{_lte: ${QueryVarName} }} }} ] }}, + {{ rule_last_hit: {{_is_null: true}} }} + ]}}"; + return query; + } } } diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs index 2ac1e59e1..feb603858 100644 --- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs +++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs @@ -13,10 +13,14 @@ public override void ConvertToSemanticType() { "rules" or "rule" => ReportType.Rules, "resolvedrules" or "resolvedrule" => ReportType.ResolvedRules, + "resolvedrulestech" or "resolvedruletech" => ReportType.ResolvedRulesTech, + "unusedrules" or "unusedrule" => ReportType.UnusedRules, "statistics" or "statistic" => ReportType.Statistics, "changes" or "change" => ReportType.Changes, "resolvedchanges" or "resolvedchange" => ReportType.ResolvedChanges, + "resolvedchangestech" or "resolvedchangetech" => ReportType.ResolvedChangesTech, "natrules" or "nat_rules" => ReportType.NatRules, + "recertifications" or "recertification" => ReportType.Recertification, _ => throw new SemanticException($"Unexpected report type found", Value.Position) }; } diff --git a/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs b/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs index 71dd5bab6..fc5382cc0 100644 --- a/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs +++ b/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs @@ -21,9 +21,8 @@ public class DynGraphqlQuery { " $limit: Int ", " $offset: Int ", - " $mgmId: [Int!]", - " $relevantImportId: bigint" - }; // $mgmId and $relevantImporId are only needed for time based filtering + " $mgmId: [Int!]" // not needed for change 
reports?? + }; public string ReportTimeString { get; set; } = ""; public List RelevantManagementIds { get; set; } = new List(); @@ -60,18 +59,6 @@ private static void SetDeviceFilter(ref DynGraphqlQuery query, DeviceFilter? dev query.ruleWhereStatement += "}]}, "; } } - private static List GetDeviceFilterAsList(DeviceFilter? deviceFilter) - { - List devIdList = new List(); - if (deviceFilter != null) - { - foreach (ManagementSelect mgmt in deviceFilter.Managements) - foreach (DeviceSelect dev in mgmt.Devices) - if (dev.Selected == true) - devIdList.Add(dev.Id); - } - return devIdList; - } private static void SetTimeFilter(ref DynGraphqlQuery query, TimeFilter? timeFilter, ReportType? reportType, RecertFilter recertFilter) { @@ -85,6 +72,8 @@ private static void SetTimeFilter(ref DynGraphqlQuery query, TimeFilter? timeFil case ReportType.ResolvedRulesTech: case ReportType.Statistics: case ReportType.NatRules: + case ReportType.UnusedRules: + query.QueryParameters.Add("$relevantImportId: bigint "); query.ruleWhereStatement += $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " + $"importControlByRuleLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}"; @@ -109,6 +98,7 @@ private static void SetTimeFilter(ref DynGraphqlQuery query, TimeFilter? timeFil query.QueryVariables["stop"] = stop; query.QueryParameters.Add("$start: timestamp! "); query.QueryParameters.Add("$stop: timestamp! "); + query.QueryParameters.Add("$relevantImportId: bigint "); query.ruleWhereStatement += $@" _and: [ @@ -214,32 +204,40 @@ private static (string, string) ResolveTimeRange(TimeFilter timeFilter) return (start, stop); } - - private static void SetRecertFilter(ref DynGraphqlQuery query, RecertFilter? recertFilter, DeviceFilter deviceFilter) + private static void SetRecertFilter(ref DynGraphqlQuery query, RecertFilter? recertFilter) { - // bool first = true; - - List deviceIdFilter = GetDeviceFilterAsList(deviceFilter); if (recertFilter != null) { - // query.QueryParameters.Add("$ownerIds: [Int!] "); - // query.QueryParameters.Add("$refdate1: Timestamp!"); - // setting owner filter: if (recertFilter.RecertOwnerList.Count > 0) { - // query.QueryVariables["ownerIds"] = recertFilter.RecertOwnerList; query.QueryParameters.Add("$ownerWhere: owner_bool_exp"); query.QueryVariables["ownerWhere"] = new {id = new {_in = recertFilter.RecertOwnerList}}; } else - { // if no ownerIds are set in the filter, return all recerts + { + // if no ownerIds are set in the filter, return all recerts query.QueryParameters.Add("$ownerWhere: owner_bool_exp"); query.QueryVariables["ownerWhere"] = new {id = new {}}; } } } + private static void SetUnusedFilter(ref DynGraphqlQuery query, UnusedFilter? 
unusedFilter) + { + if (unusedFilter != null) + { + query.QueryParameters.Add("$cut: timestamp"); + query.QueryParameters.Add("$tolerance: timestamp"); + query.QueryVariables["cut"] = DateTime.Now.AddDays(-unusedFilter.UnusedForDays); + query.QueryVariables["tolerance"] = DateTime.Now.AddDays(-unusedFilter.CreationTolerance); + query.ruleWhereStatement += $@"{{rule_metadatum: {{_or: [ + {{_and: [{{rule_last_hit: {{_is_null: false}} }}, {{rule_last_hit: {{_lte: $cut}} }} ] }}, + {{_and: [{{rule_last_hit: {{_is_null: true}} }}, {{rule_created: {{_lte: $tolerance}} }} ] }} + ]}} }}"; + } + } + private static void SetFixedFilters(ref DynGraphqlQuery query, ReportTemplate reportParams) { // leave out all header texts @@ -253,7 +251,11 @@ private static void SetFixedFilters(ref DynGraphqlQuery query, ReportTemplate re SetTimeFilter(ref query, reportParams.ReportParams.TimeFilter, (ReportType)(reportParams.ReportParams.ReportType ?? throw new Exception("No report type set")), reportParams.ReportParams.RecertFilter); if (reportParams.ReportParams.ReportType!= null && (ReportType)reportParams.ReportParams.ReportType==ReportType.Recertification) { - SetRecertFilter(ref query, reportParams.ReportParams.RecertFilter, reportParams.ReportParams.DeviceFilter); + SetRecertFilter(ref query, reportParams.ReportParams.RecertFilter); + } + if (reportParams.ReportParams.ReportType!= null && (ReportType)reportParams.ReportParams.ReportType==ReportType.UnusedRules) + { + SetUnusedFilter(ref query, reportParams.ReportParams.UnusedFilter); } } @@ -275,6 +277,15 @@ public static DynGraphqlQuery GenerateQuery(ReportTemplate filter, AstNode? ast) string paramString = string.Join(" ", query.QueryParameters.ToArray()); + string mgmtWhereString = $@"where: {{ hide_in_gui: {{_eq: false }} + mgm_id: {{_in: $mgmId }} + stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} + }} order_by: {{ mgm_name: asc }}"; + + string devWhereString = $@"where: {{ hide_in_gui: {{_eq: false }}, + stm_dev_typ: {{is_pure_routing_device:{{_eq:false}} }} + }} order_by: {{ dev_name: asc }}"; + if (((ReportType)(filter.ReportParams.ReportType ?? throw new Exception("No report type set"))).IsResolvedReport()) filter.Detailed = true; @@ -282,141 +293,120 @@ public static DynGraphqlQuery GenerateQuery(ReportTemplate filter, AstNode? 
ast) { case ReportType.Statistics: query.FullQuery = Queries.compact($@" - query statisticsReport ({paramString}) - {{ - management( - where: {{ - hide_in_gui: {{_eq: false }} - mgm_id: {{_in: $mgmId }} - stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} - }} - order_by: {{ mgm_name: asc }} - ) - {{ - name: mgm_name - id: mgm_id - objects_aggregate(where: {{ {query.nwObjWhereStatement} }}) {{ aggregate {{ count }} }} - services_aggregate(where: {{ {query.svcObjWhereStatement} }}) {{ aggregate {{ count }} }} - usrs_aggregate(where: {{ {query.userObjWhereStatement} }}) {{ aggregate {{ count }} }} - rules_aggregate(where: {{ {query.ruleWhereStatement} }}) {{ aggregate {{ count }} }} - devices( where: {{ hide_in_gui: {{_eq: false }}, stm_dev_typ: {{is_pure_routing_device:{{_eq:false}} }} }} order_by: {{ dev_name: asc }} ) + query statisticsReport ({paramString}) + {{ + management({mgmtWhereString}) {{ - name: dev_name - id: dev_id + name: mgm_name + id: mgm_id + objects_aggregate(where: {{ {query.nwObjWhereStatement} }}) {{ aggregate {{ count }} }} + services_aggregate(where: {{ {query.svcObjWhereStatement} }}) {{ aggregate {{ count }} }} + usrs_aggregate(where: {{ {query.userObjWhereStatement} }}) {{ aggregate {{ count }} }} rules_aggregate(where: {{ {query.ruleWhereStatement} }}) {{ aggregate {{ count }} }} + devices({devWhereString}) + {{ + name: dev_name + id: dev_id + rules_aggregate(where: {{ {query.ruleWhereStatement} }}) {{ aggregate {{ count }} }} + }} }} }} - }}"); + "); break; case ReportType.Rules: case ReportType.ResolvedRules: case ReportType.ResolvedRulesTech: + case ReportType.UnusedRules: query.FullQuery = Queries.compact($@" - {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)} - - query rulesReport ({paramString}) - {{ - management( where: - {{ - mgm_id: {{_in: $mgmId }}, - hide_in_gui: {{_eq: false }} - stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} - }} order_by: {{ mgm_name: asc }} ) + {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)} + query rulesReport ({paramString}) + {{ + management({mgmtWhereString}) {{ id: mgm_id name: mgm_name - devices ( where: {{ hide_in_gui: {{_eq: false }} }} order_by: {{ dev_name: asc }} ) + devices ({devWhereString}) + {{ + id: dev_id + name: dev_name + rules( + limit: $limit + offset: $offset + where: {{ access_rule: {{_eq: true}} {query.ruleWhereStatement} }} + order_by: {{ rule_num_numeric: asc }} ) {{ - id: dev_id - name: dev_name - rules( - limit: $limit - offset: $offset - where: {{ access_rule: {{_eq: true}} {query.ruleWhereStatement} }} - order_by: {{ rule_num_numeric: asc }} ) - {{ - mgm_id: mgm_id - ...{(filter.Detailed ? "ruleDetails" : "ruleOverview")} - }} - }} + mgm_id: mgm_id + {((ReportType)filter.ReportParams.ReportType == ReportType.UnusedRules ? "rule_metadatum { rule_last_hit }" : "")} + ...{(filter.Detailed ? 
"ruleDetails" : "ruleOverview")} + }} + }} }} - }}"); + }} + "); break; case ReportType.Recertification: - // remove Query Parameter relevant import id - var itemToRemove = query.QueryParameters.Single(r => r == " $relevantImportId: bigint"); - query.QueryParameters.Remove(itemToRemove); - paramString = string.Join(" ", query.QueryParameters.ToArray()); - - query.FullQuery = Queries.compact($@"{RecertQueries.ruleOpenRecertFragments} - query rulesCertReport({paramString}) {{ - management( - where: {{ - mgm_id: {{ _in: $mgmId }} - hide_in_gui: {{ _eq: false }} - stm_dev_typ: {{ - dev_typ_is_multi_mgmt: {{ _eq: false }} - is_pure_routing_device: {{ _eq: false }} - }} - }} - order_by: {{ mgm_name: asc }} - ) {{ - id: mgm_id - name: mgm_name - devices( - where: {{ hide_in_gui: {{ _eq: false }} }} - order_by: {{ dev_name: asc }} - ) {{ - id: dev_id - name: dev_name - rules( - where: {{ - rule_metadatum: {{ recertifications_aggregate: {{ count: {{ filter: {{ _and: [{{owner: $ownerWhere}}, {{recert_date: {{_is_null: true}}}}, {{next_recert_date: {{_lte: $refdate1}}}}]}}, predicate: {{_gt: 0}}}}}}}} - active:{{ _eq:true }} - {query.ruleWhereStatement} - }} - limit: $limit - offset: $offset - order_by: {{ rule_num_numeric: asc }} - ) {{ - mgm_id: mgm_id - ...ruleOpenCertOverview + query.FullQuery = Queries.compact($@" + {RecertQueries.ruleOpenRecertFragments} + query rulesCertReport({paramString}) + {{ + management({mgmtWhereString}) + {{ + id: mgm_id + name: mgm_name + devices({devWhereString}) + {{ + id: dev_id + name: dev_name + rules( + where: {{ + rule_metadatum: {{ recertifications_aggregate: {{ count: {{ filter: {{ _and: [{{owner: $ownerWhere}}, {{recert_date: {{_is_null: true}}}}, {{next_recert_date: {{_lte: $refdate1}}}}]}}, predicate: {{_gt: 0}}}}}}}} + active:{{ _eq:true }} + {query.ruleWhereStatement} + }} + limit: $limit + offset: $offset + order_by: {{ rule_num_numeric: asc }} + ) + {{ + mgm_id: mgm_id + ...ruleOpenCertOverview + }} }} }} }} - }}"); + "); break; case ReportType.Changes: case ReportType.ResolvedChanges: case ReportType.ResolvedChangesTech: query.FullQuery = Queries.compact($@" - {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)} - - query changeReport({paramString}) {{ - management(where: {{ hide_in_gui: {{_eq: false }} stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} }} order_by: {{mgm_name: asc}}) + {(filter.Detailed ? 
RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)} + query changeReport({paramString}) {{ - id: mgm_id - name: mgm_name - devices (where: {{ hide_in_gui: {{_eq: false}} stm_dev_typ:{{is_pure_routing_device:{{_eq:false}} }} }}, order_by: {{dev_name: asc}} ) + management(where: {{ hide_in_gui: {{_eq: false }} stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} }} order_by: {{mgm_name: asc}}) {{ - id: dev_id - name: dev_name - changelog_rules( - offset: $offset - limit: $limit - where: {{ - _or:[ - {{_and: [{{change_action:{{_eq:""I""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}]}}, - {{_and: [{{change_action:{{_eq:""D""}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}}, - {{_and: [{{change_action:{{_eq:""C""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}} - ] - {query.ruleWhereStatement} - }} - order_by: {{ control_id: asc }} - ) + id: mgm_id + name: mgm_name + devices ({devWhereString}) + {{ + id: dev_id + name: dev_name + changelog_rules( + offset: $offset + limit: $limit + where: {{ + _or:[ + {{_and: [{{change_action:{{_eq:""I""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}]}}, + {{_and: [{{change_action:{{_eq:""D""}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}}, + {{_and: [{{change_action:{{_eq:""C""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}} + ] + {query.ruleWhereStatement} + }} + order_by: {{ control_id: asc }} + ) {{ import: import_control {{ time: stop_time }} change_action @@ -437,30 +427,30 @@ query changeReport({paramString}) {{ case ReportType.NatRules: query.FullQuery = Queries.compact($@" - {(filter.Detailed ? RuleQueries.natRuleDetailsForReportFragments : RuleQueries.natRuleOverviewFragments)} - - query natRulesReport ({paramString}) - {{ - management( where: {{ mgm_id: {{_in: $mgmId }}, hide_in_gui: {{_eq: false }} stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} }} order_by: {{ mgm_name: asc }} ) + {(filter.Detailed ? RuleQueries.natRuleDetailsForReportFragments : RuleQueries.natRuleOverviewFragments)} + query natRulesReport ({paramString}) + {{ + management({mgmtWhereString}) {{ id: mgm_id name: mgm_name - devices ( where: {{ hide_in_gui: {{_eq: false }} stm_dev_typ:{{is_pure_routing_device:{{_eq:false}} }} }} order_by: {{ dev_name: asc }} ) - {{ - id: dev_id - name: dev_name - rules( - limit: $limit - offset: $offset - where: {{ nat_rule: {{_eq: true}}, ruleByXlateRule: {{}} {query.ruleWhereStatement} }} - order_by: {{ rule_num_numeric: asc }} ) - {{ - mgm_id: mgm_id - ...{(filter.Detailed ? "natRuleDetails" : "natRuleOverview")} - }} - }} + devices ({devWhereString}) + {{ + id: dev_id + name: dev_name + rules( + limit: $limit + offset: $offset + where: {{ nat_rule: {{_eq: true}}, ruleByXlateRule: {{}} {query.ruleWhereStatement} }} + order_by: {{ rule_num_numeric: asc }} ) + {{ + mgm_id: mgm_id + ...{(filter.Detailed ? 
"natRuleDetails" : "natRuleOverview")} + }} + }} }} - }}"); + }} + "); break; } diff --git a/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs b/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs index 93f3b9485..d4d650809 100644 --- a/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs +++ b/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs @@ -10,7 +10,8 @@ public enum ReportType ResolvedRulesTech = 6, Recertification = 7, ResolvedChanges = 8, - ResolvedChangesTech = 9 + ResolvedChangesTech = 9, + UnusedRules = 10 } public static class ReportTypeGroups @@ -24,6 +25,7 @@ public static bool IsRuleReport(this ReportType reportType) case ReportType.ResolvedRulesTech: case ReportType.NatRules: case ReportType.Recertification: + case ReportType.UnusedRules: return true; default: return false; diff --git a/roles/lib/files/FWO.Report.Filter/Parser.cs b/roles/lib/files/FWO.Report.Filter/Parser.cs index 6bcb231f9..133f64aeb 100644 --- a/roles/lib/files/FWO.Report.Filter/Parser.cs +++ b/roles/lib/files/FWO.Report.Filter/Parser.cs @@ -29,107 +29,20 @@ public Parser(List tokens) private AstNode? ParseStart() { - // if (NextTokenExists()) - // { - if (GetNextToken().Kind == TokenKind.Value) - { - // Left = new AstNodeFilterReportType() - // { - // Name = new Token(new Range(0, 0), "", TokenKind.ReportType), - // Operator = new Token(new Range(0, 0), "", TokenKind.EEQ), - // Value = new Token(new Range(0, 0), "rules", TokenKind.Value) - // }, - // Connector = new Token(new Range(0, 0), "", TokenKind.And), - - // Right = ParseTime() - // }; - // } - // else - // { - // AstNodeConnector root = new AstNodeConnector - // { - // Left = new AstNodeFilterReportType() - // { - // Name = CheckToken(TokenKind.ReportType), - // Operator = CheckToken(TokenKind.EQ, TokenKind.EEQ), - return new AstNodeFilterString - { - Name = new Token(new Range(0, 0), "", TokenKind.Value), - Operator = new Token(new Range(0, 0), "", TokenKind.EQ), - Value = CheckToken(TokenKind.Value) - }; - } - else + if (GetNextToken().Kind == TokenKind.Value) + { + return new AstNodeFilterString { - return ParseOr(); - } + Name = new Token(new Range(0, 0), "", TokenKind.Value), + Operator = new Token(new Range(0, 0), "", TokenKind.EQ), + Value = CheckToken(TokenKind.Value) + }; } - // } - - // private AstNode ParseTime() - // { - // if (NextTokenExists() == false || GetNextToken().Kind != TokenKind.Time) - // { - // AstNodeConnector root = new AstNodeConnector - // { - // Left = new AstNodeFilterDateTimeRange() - // { - // Name = new Token(new Range(0, 0), "", TokenKind.Time), - // Operator = new Token(new Range(0, 0), "", TokenKind.EQ), - // Value = new Token(new Range(0, 0), "now", TokenKind.Value) //DateTime.Now.ToString() - // } - // }; - - // if (NextTokenExists()) - // { - // root.Connector = new Token(new Range(0, 0), "", TokenKind.And); - // root.Right = ParseStart(); - // return root; - // } - // else - // { - // return root.Left; - // } - // } - - // else // TokenKinde == Time - // { - // AstNodeConnector root = new AstNodeConnector - // { - // Left = new AstNodeFilterDateTimeRange() - // { - // Name = CheckToken(TokenKind.Time), - // Operator = ParseOperator(), - // Value = CheckToken(TokenKind.Value) - // } - // }; - - // if (NextTokenExists() && GetNextToken().Kind == TokenKind.And) - // { - // root.Connector = CheckToken(TokenKind.And); - // root.Right = ParseStart(); - // return root; - // } - - // else - // { - // return root.Left; - // } - // } - // } - - // private AstNode ParseStart() - // 
{ - // if (GetNextToken().Kind == TokenKind.Value) - // { - // return new AstNodeFilterString - // { - // Name = new Token(new Range(0, 0), "", TokenKind.Value), - // Operator = new Token(new Range(0, 0), "", TokenKind.EQ), - // Value = CheckToken(TokenKind.Value) - // }; - // } - // } + else + { + return ParseOr(); + } + } private AstNode ParseOr() { @@ -237,7 +150,7 @@ TokenKind.Time or TokenKind.LastHit TokenKind.ReportType => new AstNodeFilterReportType() { Name = Name, Operator = Operator, Value = Value }, - TokenKind.DestinationPort or TokenKind.RecertDisplay + TokenKind.DestinationPort or TokenKind.RecertDisplay or TokenKind.Unused => new AstNodeFilterInt() { Name = Name, Operator = Operator, Value = Value }, TokenKind.Source or TokenKind.Destination @@ -257,7 +170,7 @@ private Token ParseFilterName() return CheckToken( TokenKind.LastHit, TokenKind.Owner, TokenKind.Destination, TokenKind.Source, TokenKind.Service, TokenKind.Protocol, TokenKind.DestinationPort, TokenKind.Action, TokenKind.FullText, TokenKind.Gateway, - TokenKind.Management, TokenKind.Remove, TokenKind.RecertDisplay, TokenKind.Disabled); + TokenKind.Management, TokenKind.Remove, TokenKind.RecertDisplay, TokenKind.Disabled, TokenKind.Unused); } private Token CheckToken(params TokenKind[] expectedTokenKinds) diff --git a/roles/lib/files/FWO.Report.Filter/TokenKind.cs b/roles/lib/files/FWO.Report.Filter/TokenKind.cs index 5c7b67fa2..1ac4c2b2c 100644 --- a/roles/lib/files/FWO.Report.Filter/TokenKind.cs +++ b/roles/lib/files/FWO.Report.Filter/TokenKind.cs @@ -22,6 +22,7 @@ public enum TokenKind RecertDisplay, FullText, LastHit, + Unused, BL, // ( BR, // ) And, diff --git a/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs b/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs index a5bc3fa14..0350b6620 100644 --- a/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs +++ b/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs @@ -57,6 +57,12 @@ public static TokenSyntax Get(TokenKind tokenKind) NoWhiteSpaceRequiered: new string[] { } ), + TokenKind.Unused => new TokenSyntax + ( + WhiteSpaceRequiered: new string[] { "not-used-for-days", "unused", "unused-days", "not-used" }, + NoWhiteSpaceRequiered: new string[] { } + ), + TokenKind.Source => new TokenSyntax ( WhiteSpaceRequiered: new string[] { "source", "src" }, diff --git a/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs b/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs index 2a9f1b701..d3151622e 100644 --- a/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs +++ b/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs @@ -94,7 +94,7 @@ public string DisplayLastRecertifier(Rule rule) protected string constructLink(string type, string symbol, long id, string name, OutputLocation location, int mgmtId, string style) { - string link = location == OutputLocation.export ? $"#" : $"{location.ToString()}#goto-report-m{mgmtId}-"; + string link = location == OutputLocation.export ? 
$"#" : $"{location.ToString()}/generation#goto-report-m{mgmtId}-"; return $" {name}"; } diff --git a/roles/lib/files/FWO.Report/ReportBase.cs b/roles/lib/files/FWO.Report/ReportBase.cs index 7bc4963d7..2f4c839dd 100644 --- a/roles/lib/files/FWO.Report/ReportBase.cs +++ b/roles/lib/files/FWO.Report/ReportBase.cs @@ -228,6 +228,7 @@ public static ReportBase ConstructReport(ReportTemplate reportFilter, UserConfig ReportType.ResolvedChangesTech => new ReportChanges(query, userConfig, repType), ReportType.NatRules => new ReportNatRules(query, userConfig, repType), ReportType.Recertification => new ReportRules(query, userConfig, repType), + ReportType.UnusedRules => new ReportRules(query, userConfig, repType), _ => throw new NotSupportedException("Report Type is not supported."), }; } diff --git a/roles/lib/files/FWO.Report/ReportRules.cs b/roles/lib/files/FWO.Report/ReportRules.cs index d8a307f30..2f929d003 100644 --- a/roles/lib/files/FWO.Report/ReportRules.cs +++ b/roles/lib/files/FWO.Report/ReportRules.cs @@ -15,47 +15,23 @@ public class ReportRules : ReportBase { public ReportRules(DynGraphqlQuery query, UserConfig userConfig, ReportType reportType) : base(query, userConfig, reportType) { } - public bool GotReportedRuleIds { get; protected set; } = false; private const int ColumnCount = 12; - public async Task GetReportedRuleIds(ApiConnection apiConnection) - { - List relevantDevIds = DeviceFilter.ExtractSelectedDevIds(Managements); - if (relevantDevIds.Count == 0) - relevantDevIds = DeviceFilter.ExtractAllDevIds(Managements); - - for (int i = 0; i < Managements.Length; i++) - { - Dictionary ruleQueryVariables = new Dictionary(); - if (Managements[i].Import.ImportAggregate.ImportAggregateMax.RelevantImportId != null) - { - ruleQueryVariables["importId"] = Managements[i].Import.ImportAggregate.ImportAggregateMax.RelevantImportId!; - ruleQueryVariables["devIds"] = relevantDevIds; - Rule[] rules = await apiConnection.SendQueryAsync(RuleQueries.getRuleIdsOfImport, ruleQueryVariables); - Managements[i].ReportedRuleIds = rules.Select(x => x.Id).Distinct().ToList(); - } - } - GotReportedRuleIds = true; - } public override async Task GetObjectsInReport(int objectsPerFetch, ApiConnection apiConnection, Func callback) // to be called when exporting { - // get rule ids per import (= management) - if (!GotReportedRuleIds) - await GetReportedRuleIds(apiConnection); - bool gotAllObjects = true; //whether the fetch count limit was reached during fetching if (!GotObjectsInReport) { - for (int i = 0; i < Managements.Length; i++) + foreach (Management management in Managements) { - if (Managements[i].Import.ImportAggregate.ImportAggregateMax.RelevantImportId is not null) + if (management.Import.ImportAggregate.ImportAggregateMax.RelevantImportId is not null) { // set query variables for object query var objQueryVariables = new Dictionary { - { "mgmIds", Managements[i].Id }, + { "mgmIds", management.Id }, { "limit", objectsPerFetch }, { "offset", 0 }, }; @@ -78,9 +54,6 @@ public override async Task GetObjectsForManagementInReport(Dictionary m.Id == mid) ?? 
throw new ArgumentException("Given management id does not exist for this report"); - if (!GotReportedRuleIds) - await GetReportedRuleIds(apiConnection); - objQueryVariables.Add("ruleIds", "{" + string.Join(", ", management.ReportedRuleIds) + "}"); objQueryVariables.Add("importId", management.Import.ImportAggregate.ImportAggregateMax.RelevantImportId!); @@ -170,6 +143,7 @@ public override async Task Generate(int rulesPerFetch, ApiConnection apiConnecti } await callback(Managements); } + SetReportedRuleIds(); } public override string SetDescription() @@ -190,6 +164,21 @@ public override string SetDescription() return $"{managementCounter} {userConfig.GetText("managements")}, {deviceCounter} {userConfig.GetText("gateways")}, {ruleCounter} {userConfig.GetText("rules")}"; } + private void SetReportedRuleIds() + { + foreach (Management mgt in Managements) + { + foreach (Device dev in mgt.Devices.Where(d => (d.Rules != null && d.Rules.Length > 0))) + { + foreach (Rule rule in dev.Rules) + { + mgt.ReportedRuleIds.Add(rule.Id); + } + } + mgt.ReportedRuleIds = mgt.ReportedRuleIds.Distinct().ToList(); + } + } + public override string ExportToCsv() { if (ReportType.IsResolvedReport()) @@ -367,6 +356,10 @@ private void appendRuleHeadlineHtml(ref StringBuilder report) report.AppendLine($"{userConfig.GetText("ip_matches")}"); report.AppendLine($"{userConfig.GetText("last_hit")}"); } + if(ReportType == ReportType.UnusedRules) + { + report.AppendLine($"{userConfig.GetText("last_hit")}"); + } report.AppendLine($"{userConfig.GetText("name")}"); report.AppendLine($"{userConfig.GetText("source_zone")}"); report.AppendLine($"{userConfig.GetText("source")}"); @@ -402,6 +395,10 @@ private void appendRulesForDeviceHtml(ref StringBuilder report, Device device, R report.AppendLine($"{ruleDisplayHtml.DisplayRecertIpMatches(rule)}"); report.AppendLine($"{ruleDisplayHtml.DisplayLastHit(rule)}"); } + if(ReportType == ReportType.UnusedRules) + { + report.AppendLine($"{ruleDisplayHtml.DisplayLastHit(rule)}"); + } report.AppendLine($"{ruleDisplayHtml.DisplayName(rule)}"); report.AppendLine($"{ruleDisplayHtml.DisplaySourceZone(rule)}"); report.AppendLine($"{ruleDisplayHtml.DisplaySource(rule, OutputLocation.export, ReportType)}"); diff --git a/roles/lib/tasks/install_dot_net.yml b/roles/lib/tasks/install_dot_net.yml index a5b945345..8f21d2dc5 100644 --- a/roles/lib/tasks/install_dot_net.yml +++ b/roles/lib/tasks/install_dot_net.yml @@ -7,7 +7,7 @@ when: ansible_facts['distribution']|lower == 'ubuntu' - set_fact: distribution_version="{{ debian_testing_version }}" - when: ansible_facts['distribution_release']|lower == 'bookworm' + when: ansible_facts['distribution_release']|lower == debian_testing_release_name # for all distros except ubuntu >=22.04 we need to include a package source - block: diff --git a/roles/lib/tasks/main.yml b/roles/lib/tasks/main.yml index 4dea540d0..5aa55cc62 100644 --- a/roles/lib/tasks/main.yml +++ b/roles/lib/tasks/main.yml @@ -58,6 +58,7 @@ - FWO.Report.Filter - FWO.DeviceAutoDiscovery - FWO.Mail + tags: [ 'test' ] - name: finalize handler for datarecovery set_fact: diff --git a/roles/middleware/files/FWO.Middleware.Server/Controllers/GroupController.cs b/roles/middleware/files/FWO.Middleware.Server/Controllers/GroupController.cs index 0485d2a3b..aba96d562 100644 --- a/roles/middleware/files/FWO.Middleware.Server/Controllers/GroupController.cs +++ b/roles/middleware/files/FWO.Middleware.Server/Controllers/GroupController.cs @@ -33,7 +33,6 @@ public GroupController(List ldaps) [Authorize(Roles = 
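// --- Side note, not part of the patch ---------------------------------------------------------
// The new SetReportedRuleIds() above collects the distinct rule ids per management with nested
// foreach loops. A functionally equivalent LINQ formulation (using the same Devices, Rules and
// ReportedRuleIds members shown in the diff; requires System.Linq) would be:
//
//     mgt.ReportedRuleIds = mgt.Devices
//         .Where(d => d.Rules != null && d.Rules.Length > 0)
//         .SelectMany(d => d.Rules)
//         .Select(r => r.Id)
//         .Distinct()
//         .ToList();
// ----------------------------------------------------------------------------------------------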
"admin, auditor, recertifier")] public async Task>> Get() { - bool admin = User.IsInRole("admin"); try { ConcurrentBag allGroups = new ConcurrentBag(); diff --git a/roles/middleware/files/FWO.Middleware.Server/DailyCheckScheduler.cs b/roles/middleware/files/FWO.Middleware.Server/DailyCheckScheduler.cs index 116f9bd76..c1a360f06 100644 --- a/roles/middleware/files/FWO.Middleware.Server/DailyCheckScheduler.cs +++ b/roles/middleware/files/FWO.Middleware.Server/DailyCheckScheduler.cs @@ -214,7 +214,7 @@ private async Task CheckDemoData() private async Task CheckImports() { - List importStati = await apiConnection.SendQueryAsync>(FWO.Api.Client.Queries.DeviceQueries.getImportStatus); + List importStati = await apiConnection.SendQueryAsync>(FWO.Api.Client.Queries.MonitorQueries.getImportStatus); int importIssues = 0; object jsonData; foreach(ImportStatus imp in importStati.Where(x => !x.ImportDisabled)) diff --git a/roles/middleware/files/FWO.Middleware.Server/JwtWriter.cs b/roles/middleware/files/FWO.Middleware.Server/JwtWriter.cs index e4c7c78a1..cd5d01103 100644 --- a/roles/middleware/files/FWO.Middleware.Server/JwtWriter.cs +++ b/roles/middleware/files/FWO.Middleware.Server/JwtWriter.cs @@ -42,9 +42,9 @@ public async Task CreateJWT(UiUser? user = null, TimeSpan? lifetime = nu ClaimsIdentity subject; if (user != null) - subject = GetClaims(await uiUserHandler.HandleUiUserAtLogin(user)); + subject = SetClaims(await uiUserHandler.HandleUiUserAtLogin(user)); else - subject = GetClaims(new UiUser() { Name = "", Password = "", Dn = "anonymous", Roles = new List { "anonymous" } }); + subject = SetClaims(new UiUser() { Name = "", Password = "", Dn = "anonymous", Roles = new List { "anonymous" } }); // adding uiuser.uiuser_id as x-hasura-user-id to JWT // Create JWToken @@ -111,7 +111,7 @@ private string CreateJWTInternal(string role) return GeneratedToken; } - private ClaimsIdentity GetClaims(UiUser user) + private ClaimsIdentity SetClaims(UiUser user) { ClaimsIdentity claimsIdentity = new ClaimsIdentity(); claimsIdentity.AddClaim(new Claim(ClaimTypes.Name, user.Name)); @@ -127,7 +127,7 @@ private ClaimsIdentity GetClaims(UiUser user) claimsIdentity.AddClaim(new Claim("x-hasura-visible-devices", $"{{ {string.Join(",", user.Tenant.VisibleDevices)} }}")); } - // we need to create an extra list beacause hasura only accepts an array of roles even if there is only one + // we need to create an extra list because hasura only accepts an array of roles even if there is only one List hasuraRolesList = new List(); foreach (string role in user.Roles) @@ -153,6 +153,8 @@ private ClaimsIdentity GetClaims(UiUser user) defaultRole = "reporter-viewall"; else if (hasuraRolesList.Contains("reporter")) defaultRole = "reporter"; + else if (hasuraRolesList.Contains("recertifier")) + defaultRole = "recertifier"; else defaultRole = user.Roles[0]; // pick first role at random (todo: might need to be changed) } diff --git a/roles/middleware/files/FWO.Middleware.Server/Ldap.cs b/roles/middleware/files/FWO.Middleware.Server/Ldap.cs index 7e7dcada8..9d4dec208 100644 --- a/roles/middleware/files/FWO.Middleware.Server/Ldap.cs +++ b/roles/middleware/files/FWO.Middleware.Server/Ldap.cs @@ -184,9 +184,13 @@ private string getGroupSearchFilter(string searchPattern) } } } + catch (LdapException ldapException) + { + Log.WriteInfo("Ldap entry exception", $"Ldap entry search at \"{Address}:{Port}\" lead to exception: {ldapException.Message}"); + } catch (Exception exception) { - Log.WriteError($"Non-LDAP exception {Address}:{Port}", 
"Unexpected error while trying to validate user", exception); + Log.WriteError($"Non-LDAP exception \"{Address}:{Port}\"", "Unexpected error while trying to validate user", exception); } Log.WriteDebug("Invalid Credentials", $"Invalid login credentials - could not authenticate user \"{ user.Name}\" on {Address}:{Port}."); diff --git a/roles/middleware/files/FWO.Middleware.Server/Program.cs b/roles/middleware/files/FWO.Middleware.Server/Program.cs index 5be811de3..9ac3c2dc8 100644 --- a/roles/middleware/files/FWO.Middleware.Server/Program.cs +++ b/roles/middleware/files/FWO.Middleware.Server/Program.cs @@ -46,8 +46,8 @@ } Action handleSubscriptionException = (Exception exception) => Log.WriteError("Subscription", "Subscription lead to exception.", exception); -ApiSubscription>.SubscriptionUpdate connectedLdapsSubscriptionUpdate = (List ldapsChanges) => { lock (changesLock) { connectedLdaps = ldapsChanges; } }; -ApiSubscription> connectedLdapsSubscription = apiConnection.GetSubscription>(handleSubscriptionException, connectedLdapsSubscriptionUpdate, AuthQueries.getLdapConnectionsSubscription); +GraphQlApiSubscription>.SubscriptionUpdate connectedLdapsSubscriptionUpdate = (List ldapsChanges) => { lock (changesLock) { connectedLdaps = ldapsChanges; } }; +GraphQlApiSubscription> connectedLdapsSubscription = apiConnection.GetSubscription>(handleSubscriptionException, connectedLdapsSubscriptionUpdate, AuthQueries.getLdapConnectionsSubscription); Log.WriteInfo("Found ldap connection to server", string.Join("\n", connectedLdaps.ConvertAll(ldap => $"{ldap.Address}:{ldap.Port}"))); // Create and start report scheduler diff --git a/roles/middleware/files/FWO.Middleware.Server/RecertCheck.cs b/roles/middleware/files/FWO.Middleware.Server/RecertCheck.cs index c9ff036c6..b20acfb7c 100644 --- a/roles/middleware/files/FWO.Middleware.Server/RecertCheck.cs +++ b/roles/middleware/files/FWO.Middleware.Server/RecertCheck.cs @@ -168,7 +168,7 @@ private async Task> generateRecertificationReport(ApiConnection apiCo RecertOverdueOnly = overdueOnly, RecertificationDisplayPeriod = globalConfig.RecertificationNoticePeriod }; - ReportTemplate reportParams = new ReportTemplate("", deviceFilter, (int) ReportType.Recertification, new TimeFilter(), recertFilter); + ReportTemplate reportParams = new ReportTemplate("", deviceFilter, (int) ReportType.Recertification, new TimeFilter(), recertFilter, null); ReportBase? 
currentReport = ReportBase.ConstructReport(reportParams, userConfig); Management[] managements = new Management[0]; diff --git a/roles/middleware/files/FWO.Middleware.Server/ReportScheduler.cs b/roles/middleware/files/FWO.Middleware.Server/ReportScheduler.cs index efb50820c..5e9e4edc5 100644 --- a/roles/middleware/files/FWO.Middleware.Server/ReportScheduler.cs +++ b/roles/middleware/files/FWO.Middleware.Server/ReportScheduler.cs @@ -23,7 +23,7 @@ public class ReportScheduler private readonly string apiServerUri; private readonly ApiConnection apiConnection; - private readonly ApiSubscription scheduledReportsSubscription; + private readonly GraphQlApiSubscription scheduledReportsSubscription; private readonly JwtWriter jwtWriter; private readonly object ldapLock = new object(); @@ -32,7 +32,7 @@ public class ReportScheduler /// /// Constructor needing connection, jwtWriter and subscription to connected ldaps /// - public ReportScheduler(ApiConnection apiConnection, JwtWriter jwtWriter, ApiSubscription> connectedLdapsSubscription) + public ReportScheduler(ApiConnection apiConnection, JwtWriter jwtWriter, GraphQlApiSubscription> connectedLdapsSubscription) { this.jwtWriter = jwtWriter; this.apiConnection = apiConnection; diff --git a/roles/middleware/tasks/main.yml b/roles/middleware/tasks/main.yml index 387cc5773..24e952a9d 100644 --- a/roles/middleware/tasks/main.yml +++ b/roles/middleware/tasks/main.yml @@ -39,6 +39,7 @@ dest: "{{ middleware_server_base_dir }}" owner: "{{ fworch_user }}" group: "{{ fworch_group }}" + tags: [ 'test' ] - name: install python3-openssl for openssl key generation and unprivileged user package: diff --git a/roles/openldap-server/tasks/main.yml b/roles/openldap-server/tasks/main.yml index 3ca9405fa..d8a7234fd 100644 --- a/roles/openldap-server/tasks/main.yml +++ b/roles/openldap-server/tasks/main.yml @@ -6,6 +6,7 @@ owner: "{{ fworch_user }}" group: "{{ fworch_group }}" become: true + tags: [ 'test', 'unittest' ] - block: ### OS basics @@ -54,6 +55,7 @@ owner: "{{ fworch_user }}" group: "{{ fworch_group }}" when: not is_manger_pw_present_flag.stat.exists + tags: [ 'test' ] - name: Generate the root password hash for the config command: "slappasswd -T {{ ldap_manager_pwd_file }}" diff --git a/roles/openldap-server/tasks/upgrade/6.4.9.yml b/roles/openldap-server/tasks/upgrade/6.4.9.yml new file mode 100644 index 000000000..6210c6b27 --- /dev/null +++ b/roles/openldap-server/tasks/upgrade/6.4.9.yml @@ -0,0 +1,11 @@ +- name: copy associated ldif files to system + template: + src: upgrade/6.4.9.ldif.j2 + dest: "{{ middleware_ldif_dir }}/6.4.9.ldif" + force: true + become: true + +- name: upgrade sample group roles + command: "ldapmodify -H {{ openldap_url }} -D {{ openldap_superuser_dn }} -y {{ ldap_manager_pwd_file }} -x -f {{ middleware_ldif_dir }}/6.4.9.ldif -c" + become: true + ignore_errors: true diff --git a/roles/openldap-server/templates/config.ldif.j2 b/roles/openldap-server/templates/config.ldif.j2 index 27e171f6c..2322c8ee1 100644 --- a/roles/openldap-server/templates/config.ldif.j2 +++ b/roles/openldap-server/templates/config.ldif.j2 @@ -16,7 +16,6 @@ olcModulePath: /usr/lib/ldap olcModuleLoad: {0}back_mdb.la olcModuleLoad: {1}memberof.la olcModuleLoad: {2}refint.la -olcModuleLoad: {3}ppolicy.la # internal schema dn: cn=schema,cn=config @@ -28,9 +27,6 @@ include: file:///etc/ldap/schema/core.ldif include: file:///etc/ldap/schema/cosine.ldif include: file:///etc/ldap/schema/inetorgperson.ldif include: file:///etc/ldap/schema/nis.ldif -{% if not 
((ansible_facts['distribution_release']|lower == 'bookworm') or (ansible_distribution|lower == 'ubuntu' and ansible_distribution_version is version ('22', '>='))) %} -include: file:///etc/ldap/schema/ppolicy.ldif -{% endif %} # configure config database dn: olcDatabase=config,cn=config @@ -107,12 +103,3 @@ objectClass: top olcOverlay: refint olcRefintAttribute: memberOf olcRefintAttribute: uniqueMember - -# Password policy overlay -dn: olcOverlay=ppolicy,olcDatabase={1}mdb,cn=config -objectClass: olcConfig -objectClass: top -objectClass: olcOverlayConfig -objectClass: olcPPolicyConfig -olcOverlay: ppolicy -olcPPolicyHashCleartext: TRUE diff --git a/roles/openldap-server/templates/upgrade/6.4.9.ldif.j2 b/roles/openldap-server/templates/upgrade/6.4.9.ldif.j2 new file mode 100644 index 000000000..b93122332 --- /dev/null +++ b/roles/openldap-server/templates/upgrade/6.4.9.ldif.j2 @@ -0,0 +1,13 @@ +dn: cn=recertifier,ou=role,{{ openldap_path }} +changetype: modify +delete: uniquemember +uniquemember: uid=ownergroup_F{{ sample_postfix }},ou=group,{{ openldap_path }} +- +delete: uniquemember +uniquemember: uid=ownergroup_D{{ sample_postfix }},ou=group,{{ openldap_path }} +- +add: uniquemember +uniquemember: cn=ownergroup_F{{ sample_postfix }},ou=group,{{ openldap_path }} +- +add: uniquemember +uniquemember: cn=ownergroup_D{{ sample_postfix }},ou=group,{{ openldap_path }} diff --git a/roles/sample-auth-data/templates/tree_roles_for_sample_operators.ldif.j2 b/roles/sample-auth-data/templates/tree_roles_for_sample_operators.ldif.j2 index eacc056ea..a3eb758d4 100644 --- a/roles/sample-auth-data/templates/tree_roles_for_sample_operators.ldif.j2 +++ b/roles/sample-auth-data/templates/tree_roles_for_sample_operators.ldif.j2 @@ -14,10 +14,10 @@ uniquemember: uid=user1{{ sample_postfix }},ou=tenant1{{ sample_postfix }},ou=op dn: cn=recertifier,ou=role,{{ openldap_path }} changetype: modify add: uniquemember -uniquemember: uid=ownergroup_F{{ sample_postfix }},ou=group,{{ openldap_path }} +uniquemember: cn=ownergroup_F{{ sample_postfix }},ou=group,{{ openldap_path }} dn: cn=recertifier,ou=role,{{ openldap_path }} changetype: modify add: uniquemember -uniquemember: uid=ownergroup_D{{ sample_postfix }},ou=group,{{ openldap_path }} +uniquemember: cn=ownergroup_D{{ sample_postfix }},ou=group,{{ openldap_path }} diff --git a/roles/test/files/FWO.Test/ApiTest.cs b/roles/test/files/FWO.Test/ApiTest.cs index 882358966..00d21b5ad 100644 --- a/roles/test/files/FWO.Test/ApiTest.cs +++ b/roles/test/files/FWO.Test/ApiTest.cs @@ -12,6 +12,7 @@ namespace FWO.Test { [TestFixture] + [Parallelizable] public class ApiTest { ApiConnection apiConnection; diff --git a/roles/test/files/FWO.Test/ExportTest.cs b/roles/test/files/FWO.Test/ExportTest.cs index bf8ca7d91..34b5916ba 100644 --- a/roles/test/files/FWO.Test/ExportTest.cs +++ b/roles/test/files/FWO.Test/ExportTest.cs @@ -8,6 +8,7 @@ namespace FWO.Test { [TestFixture] + [Parallelizable] internal class ExportTest { static NetworkObject TestIp1 = new NetworkObject(){ Id = 1, Name = "TestIp1", IP = "1.2.3.4/32", IpEnd = "", Type = new NetworkObjectType(){ Name = "network" }}; @@ -156,6 +157,55 @@ public void ResolvedRulesTechGenerateHtml() Assert.AreEqual(expectedHtmlResult, removeLinebreaks((removeGenDate(reportRules.ExportToHtml(), true)))); } + [Test] + public void UnusedRulesGenerateHtml() + { + Log.WriteInfo("Test Log", "starting unused rules report html generation"); + ReportRules reportRules = new ReportRules(query, userConfig, ReportType.UnusedRules); + 
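// --- Illustrative sketch, not part of the patch ----------------------------------------------
// Semantics of the new UnusedRules report, as encoded in SetUnusedFilter/ExtractUnusedFilter
// earlier in this diff: a rule counts as unused if its last hit is older than the cut date
// (now minus UnusedForDays), or if it has never been hit and is older than the creation
// tolerance. IsUnused is a placeholder name; UnusedForDays and CreationTolerance mirror the
// UnusedFilter properties referenced in the patch.
using System;

static class UnusedRuleSketch
{
    public static bool IsUnused(DateTime? lastHit, DateTime? created, int unusedForDays, int creationTolerance)
    {
        DateTime cut = DateTime.Now.AddDays(-unusedForDays);
        DateTime tolerance = DateTime.Now.AddDays(-creationTolerance);
        return (lastHit != null && lastHit <= cut)                            // hit, but too long ago
            || (lastHit == null && created != null && created <= tolerance);  // never hit, and not brand new
    }
}
// Example: a rule created 90 days ago that has never been hit is reported as unused when
// UnusedForDays = 30 and CreationTolerance = 7:
//   UnusedRuleSketch.IsUnused(null, DateTime.Now.AddDays(-90), 30, 7) == true
// ----------------------------------------------------------------------------------------------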
reportRules.Managements = ConstructRuleReport(false); + + string expectedHtmlResult = "Unused Rules Report" + + "" + + "" + + "

    Unused Rules Report

    " + + "

    Filter: TestFilter

    " + + "

    Time of configuration: 2023-04-20T15:50:04Z (UTC)

    " + + "

    Generated on: Z (UTC)

    " + + "

    Devices: TestMgt [TestDev]


    " + + "

    TestMgt


    " + + "

    TestDev


    " + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "
    No.Last HitNameSource ZoneSourceDestination ZoneDestinationServicesActionTrackEnabledUidComment
    12022-04-19TestRule1srczn TestIp1 (1.2.3.4/32)
     TestIp2 (127.0.0.1/32)
    dstzn TestIpRange (1.2.3.4/32-1.2.3.5/32) TestService1 (443/TCP)acceptnoneYuid1comment1
    2TestRule2not
     TestUser1@ TestIp1 (1.2.3.4/32)
     TestUser1@ TestIp2 (127.0.0.1/32)
    not
     TestUser2@ TestIpRange (1.2.3.4/32-1.2.3.5/32)
    not
     TestService2 (6666-7777/UDP)
    denynoneYuid2:123comment2
    " + + "

    Network Objects


    " + + "" + + "" + + "" + + "" + + "
    No.NameTypeIP AddressMembersUidComment
    1TestIp1network1.2.3.4/32
    2TestIp2network127.0.0.1/32
    3TestIpRangeip_range1.2.3.4/32-1.2.3.5/32
    " + + "

    Network Services


    " + + "" + + "" + + "" + + "
    No.NameTypeProtocolPortMembersUidComment
    1TestService1TestService1TCP443
    2TestService2TestService2UDP6666-7777
    " + + "

    Users


    " + + "" + + "" + + "" + + "
    No.NameTypeMembersUidComment
    1TestUser1TestUser1
    2TestUser2TestUser2
    "; + Assert.AreEqual(expectedHtmlResult, removeLinebreaks((removeGenDate(reportRules.ExportToHtml(), true)))); + } + [Test] public void RecertReportGenerateHtml() { @@ -178,7 +228,7 @@ public void RecertReportGenerateHtml() $"

    1. {DateOnly.FromDateTime(DateTime.Now.AddDays(5)).ToString("yyyy-MM-dd")}

    2. {DateOnly.FromDateTime(DateTime.Now.AddDays(-5)).ToString("yyyy-MM-dd")}

    " + "

    1. TestOwner1

    2. TestOwner2

    " + "

    1. TestIp1

    2. TestIp2

    " + - "" + + "2022-04-19" + "TestRule1" + "srczn" + " TestIp1 (1.2.3.4/32)
     TestIp2 (127.0.0.1/32)" + @@ -574,8 +624,8 @@ public void RulesGenerateJson() "\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\"," + - "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + - "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []},\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"}," + + "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": \"2022-04-19T00:00:00\",\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []},\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceName\": \"\"}," + "{\"rule_id\": 0,\"rule_uid\": \"uid2:123\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule2\",\"rule_comment\": \"comment2\",\"rule_disabled\": false," + "\"rule_services\": [{\"service\": {\"svc_id\": 2,\"svc_name\": \"TestService2\",\"svc_uid\": \"\",\"svc_port\": 6666,\"svc_port_end\": 7777,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"svc_last_seen\": 0," + "\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"UDP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + @@ -588,8 +638,8 @@ public void RulesGenerateJson() "\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": 
\"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 2,\"user_uid\": \"\",\"user_name\": \"TestUser2\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"group\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\"," + - "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + - "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []},\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"}],\"changelog_rules\": null,\"rules_aggregate\": {\"aggregate\": {\"count\": 0}}," + + "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []},\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceName\": \"\"}],\"changelog_rules\": null,\"rules_aggregate\": {\"aggregate\": {\"count\": 0}}," + "\"Selected\": false,\"Relevant\": false,\"AwaitMgmt\": false,\"Delete\": false,\"ActionId\": 0}],\"networkObjects\": [],\"serviceObjects\": [],\"userObjects\": []," + "\"reportNetworkObjects\": [{\"obj_id\": 1,\"obj_name\": \"TestIp1\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "{\"obj_id\": 2,\"obj_name\": \"TestIp2\",\"obj_ip\": \"127.0.0.1/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": 
{\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + @@ -677,9 +727,9 @@ public void ChangesGenerateJson() "\"changelog_rules\": [{\"import\": {\"time\": \"2023-04-05T12:00:00\"},\"change_action\": \"I\"," + "\"old\": {\"rule_id\": 0,\"rule_uid\": \"\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"\",\"rule_comment\": \"\",\"rule_disabled\": false," + "\"rule_services\": [],\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_src_neg\": false,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_tos\": [],\"rule_action\": \"\",\"rule_track\": \"\",\"section_header\": \"\"," + - "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 0,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"uid1\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule1\",\"rule_comment\": \"comment1\",\"rule_disabled\": false," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 0,\"Certified\": false,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"uid1\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule1\",\"rule_comment\": \"comment1\",\"rule_disabled\": false," + "\"rule_services\": [{\"service\": {\"svc_id\": 1,\"svc_name\": \"TestService1\",\"svc_uid\": \"\",\"svc_port\": 443,\"svc_port_end\": 443,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"svc_last_seen\": 0,\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"TCP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + "\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_src_neg\": false,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"srczn\"}," + "\"rule_froms\": [{\"object\": {\"obj_id\": 1,\"obj_name\": \"TestIp1\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 
0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + @@ -688,9 +738,9 @@ public void ChangesGenerateJson() "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_dst_neg\": false,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"dstzn\"},\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + - "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": \"2022-04-19T00:00:00\",\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"DeviceName\": \"\"}," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceName\": \"\"},\"DeviceName\": \"\"}," + "{\"import\": {\"time\": \"2023-04-05T12:00:00\"},\"change_action\": \"C\",\"old\": {\"rule_id\": 0,\"rule_uid\": \"uid1\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule1\",\"rule_comment\": \"comment1\",\"rule_disabled\": false," + "\"rule_services\": [{\"service\": {\"svc_id\": 
1,\"svc_name\": \"TestService1\",\"svc_uid\": \"\",\"svc_port\": 443,\"svc_port_end\": 443,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"svc_last_seen\": 0,\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"TCP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + "\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_src_neg\": false,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"srczn\"}," + @@ -699,9 +749,9 @@ public void ChangesGenerateJson() "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_dst_neg\": false,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"dstzn\"},\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + - "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": \"2022-04-19T00:00:00\",\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"new\": 
{\"rule_id\": 0,\"rule_uid\": \"\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule1\",\"rule_comment\": \"new comment\",\"rule_disabled\": false," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule1\",\"rule_comment\": \"new comment\",\"rule_disabled\": false," + "\"rule_services\": [{\"service\": {\"svc_id\": 1,\"svc_name\": \"TestService1\",\"svc_uid\": \"\",\"svc_port\": 443,\"svc_port_end\": 443,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"svc_last_seen\": 0,\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"TCP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + "\"rule_svc_neg\": true,\"rule_svc\": \"\",\"rule_src_neg\": false,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"srczn\"},\"rule_froms\": [{\"object\": {\"obj_id\": 5,\"obj_name\": \"TestIp1Changed\",\"obj_ip\": \"2.3.4.5/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}},{\"object\": {\"obj_id\": 2,\"obj_name\": \"TestIp2\",\"obj_ip\": \"127.0.0.1/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + @@ -709,9 +759,9 @@ public void ChangesGenerateJson() "\"rule_dst_neg\": false,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"dstzn\"},\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}},{\"object\": {\"obj_id\": 
4,\"obj_name\": \"TestIpNew\",\"obj_ip\": \"10.0.6.1/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 0,\"user_uid\": \"\",\"user_name\": \"\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + - "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_action\": \"accept\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": \"2022-04-19T00:00:00\",\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"DeviceName\": \"\"}," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 1,\"Certified\": false,\"DeviceName\": \"\"},\"DeviceName\": \"\"}," + "{\"import\": {\"time\": \"2023-04-05T12:00:00\"},\"change_action\": \"C\",\"old\": {\"rule_id\": 0,\"rule_uid\": \"uid2:123\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule2\",\"rule_comment\": \"comment2\",\"rule_disabled\": false," + "\"rule_services\": [{\"service\": {\"svc_id\": 2,\"svc_name\": \"TestService2\",\"svc_uid\": \"\",\"svc_port\": 6666,\"svc_port_end\": 7777,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"svc_last_seen\": 0,\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"UDP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + "\"rule_svc_neg\": true,\"rule_svc\": \"\",\"rule_src_neg\": true,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"\"}," + @@ -720,9 +770,9 @@ public void ChangesGenerateJson() "\"usr\": {\"user_id\": 1,\"user_uid\": \"\",\"user_name\": 
\"TestUser1\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_dst_neg\": true,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 2,\"user_uid\": \"\",\"user_name\": \"TestUser2\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"group\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + - "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"uid2:123\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule2\",\"rule_comment\": \"comment2\",\"rule_disabled\": true," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"uid2:123\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule2\",\"rule_comment\": \"comment2\",\"rule_disabled\": true," + "\"rule_services\": [{\"service\": {\"svc_id\": 2,\"svc_name\": \"TestService2\",\"svc_uid\": \"\",\"svc_port\": 6666,\"svc_port_end\": 7777,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": 
\"0001-01-01T00:00:00\"},\"svc_last_seen\": 0,\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"UDP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + "\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_src_neg\": true,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"\"}," + "\"rule_froms\": [{\"object\": {\"obj_id\": 1,\"obj_name\": \"TestIp1\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"network\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + @@ -730,9 +780,9 @@ public void ChangesGenerateJson() "\"usr\": {\"user_id\": 1,\"user_uid\": \"\",\"user_name\": \"TestUser1\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_dst_neg\": false,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 2,\"user_uid\": \"\",\"user_name\": \"TestUser2\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"group\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + - "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": 
[]}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"DeviceName\": \"\"}," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceName\": \"\"},\"DeviceName\": \"\"}," + "{\"import\": {\"time\": \"2023-04-05T12:00:00\"},\"change_action\": \"D\",\"old\": {\"rule_id\": 0,\"rule_uid\": \"uid2:123\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"TestRule2\",\"rule_comment\": \"comment2\",\"rule_disabled\": false," + "\"rule_services\": [{\"service\": {\"svc_id\": 2,\"svc_name\": \"TestService2\",\"svc_uid\": \"\",\"svc_port\": 6666,\"svc_port_end\": 7777,\"svc_source_port\": null,\"svc_source_port_end\": null,\"svc_code\": \"\",\"svc_timeout\": null,\"svc_typ_id\": null,\"active\": false,\"svc_create\": 0,\"svc_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"svc_last_seen\": 0,\"service_type\": {\"name\": \"\"},\"svc_comment\": \"\",\"svc_color_id\": null,\"ip_proto_id\": null,\"protocol_name\": {\"id\": 0,\"name\": \"UDP\"},\"svc_member_names\": \"\",\"svc_member_refs\": \"\",\"svcgrps\": [],\"svcgrp_flats\": []}}]," + "\"rule_svc_neg\": true,\"rule_svc\": \"\",\"rule_src_neg\": true,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"\"}," + @@ -741,13 +791,13 @@ public void ChangesGenerateJson() "\"usr\": {\"user_id\": 1,\"user_uid\": \"\",\"user_name\": \"TestUser1\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + "\"rule_dst_neg\": true,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_tos\": [{\"object\": {\"obj_id\": 3,\"obj_name\": \"TestIpRange\",\"obj_ip\": \"1.2.3.4/32\",\"obj_ip_end\": \"1.2.3.5/32\",\"obj_uid\": \"\",\"zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"active\": false,\"obj_create\": 0,\"obj_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"obj_last_seen\": 0,\"type\": {\"name\": \"ip_range\"},\"obj_comment\": \"\",\"obj_member_names\": \"\",\"obj_member_refs\": \"\",\"objgrps\": [],\"objgrp_flats\": []}," + "\"usr\": {\"user_id\": 2,\"user_uid\": \"\",\"user_name\": \"TestUser2\",\"user_comment\": \"\",\"user_lastname\": \"\",\"user_firstname\": \"\",\"usr_typ_id\": 0,\"type\": {\"usr_typ_name\": \"group\"},\"user_create\": 0,\"user_create_time\": {\"time\": \"0001-01-01T00:00:00\"},\"user_last_seen\": 0,\"user_member_names\": \"\",\"user_member_refs\": \"\",\"usergrps\": [],\"usergrp_flats\": []}}]," + - "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_action\": \"deny\",\"rule_track\": \"none\",\"section_header\": \"\",\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": 
null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"\",\"rule_comment\": \"\",\"rule_disabled\": false," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 2,\"Certified\": false,\"DeviceName\": \"\"},\"new\": {\"rule_id\": 0,\"rule_uid\": \"\",\"mgm_id\": 0,\"rule_num_numeric\": 0,\"rule_name\": \"\",\"rule_comment\": \"\",\"rule_disabled\": false," + "\"rule_services\": [],\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_src_neg\": false,\"rule_src\": \"\",\"src_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"dst_zone\": {\"zone_id\": 0,\"zone_name\": \"\"},\"rule_tos\": [],\"rule_action\": \"\",\"rule_track\": \"\",\"section_header\": \"\"," + - "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + + "\"rule_metadatum\": {\"rule_metadata_id\": 0,\"rule_created\": null,\"rule_last_modified\": null,\"rule_first_hit\": null,\"rule_last_hit\": null,\"rule_last_certified\": null,\"rule_last_certifier_dn\": \"\",\"rule_to_be_removed\": false,\"rule_decert_date\": null,\"rule_recertification_comment\": \"\",\"recertification\": [],\"recert_history\": [],\"NextRecert\": \"0001-01-01T00:00:00\",\"LastCertifierName\": \"\",\"Recert\": false,\"Style\": \"\"}," + "\"translate\": {\"rule_svc_neg\": false,\"rule_svc\": \"\",\"rule_services\": [],\"rule_src_neg\": false,\"rule_src\": \"\",\"rule_froms\": [],\"rule_dst_neg\": false,\"rule_dst\": \"\",\"rule_tos\": []}," + - "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"DisplayOrderNumber\": 0,\"Certified\": false,\"DeviceId\": 0,\"DeviceName\": \"\"},\"DeviceName\": \"\"}],\"rules_aggregate\": {\"aggregate\": {\"count\": 0}},\"Selected\": false,\"Relevant\": false,\"AwaitMgmt\": false,\"Delete\": false,\"ActionId\": 0}]," + + "\"owner_name\": \"\",\"owner_id\": null,\"matches\": \"\",\"dev_id\": 0,\"DisplayOrderNumber\": 0,\"Certified\": false,\"DeviceName\": \"\"},\"DeviceName\": \"\"}],\"rules_aggregate\": {\"aggregate\": {\"count\": 0}},\"Selected\": false,\"Relevant\": false,\"AwaitMgmt\": false,\"Delete\": false,\"ActionId\": 0}]," + "\"networkObjects\": [],\"serviceObjects\": [],\"userObjects\": [],\"reportNetworkObjects\": [],\"reportServiceObjects\": [],\"reportUserObjects\": [],\"deviceType\": {\"id\": 0,\"name\": \"\",\"version\": \"\",\"manufacturer\": \"\",\"isPureRoutingDevice\": false,\"isManagement\": false}," + "\"import\": {\"aggregate\": {\"max\": {\"id\": 
null}}},\"RelevantImportId\": null,\"Ignore\": false,\"AwaitDevice\": false,\"Delete\": false,\"ActionId\": 0,\"ReportedRuleIds\": [],\"ReportedNetworkServiceIds\": [],\"objects_aggregate\": {\"aggregate\": {\"count\": 0}},\"services_aggregate\": {\"aggregate\": {\"count\": 0}},\"usrs_aggregate\": {\"aggregate\": {\"count\": 0}},\"rules_aggregate\": {\"aggregate\": {\"count\": 0}}}]"; Assert.AreEqual(expectedJsonResult, removeLinebreaks((removeGenDate(reportChanges.ExportToJson(), false, true)))); @@ -887,7 +937,8 @@ private Rule InitRule1(bool resolved) DestinationNegated = false, Tos = InitTos(resolved), ServiceNegated = false, - Services = InitServices(TestService1, resolved) + Services = InitServices(TestService1, resolved), + Metadata = new RuleMetadata(){ LastHit = new DateTime(2022,04,19) } }; } diff --git a/roles/test/files/FWO.Test/LockTest.cs b/roles/test/files/FWO.Test/LockTest.cs index 5d83dda0e..789b97be8 100644 --- a/roles/test/files/FWO.Test/LockTest.cs +++ b/roles/test/files/FWO.Test/LockTest.cs @@ -1,5 +1,6 @@ using FWO.Logging; using NUnit.Framework; +using System; using System.Reflection; namespace FWO.Test @@ -9,80 +10,130 @@ namespace FWO.Test public class LockTest { private string lockFilePath = $"/var/fworch/lock/{Assembly.GetEntryAssembly()?.GetName().Name}_log.lock"; + private static Random random = new Random(); [SetUp] - public void SetUp() + public async Task SetUp() { - if (File.Exists(lockFilePath)) - { - File.Delete(lockFilePath); - } + await ExecuteFileAction(() => + { + if (File.Exists(lockFilePath)) + { + File.Delete(lockFilePath); + } + return Task.CompletedTask; + }); // Implicitly call static constructor so backround lock process is started Log.WriteInfo("Startup", "Starting Lock Tests..."); } [TearDown] - public void TearDown() + public async Task TearDown() { - if (File.Exists(lockFilePath)) - { - File.Delete(lockFilePath); - } + await ExecuteFileAction(() => + { + if (File.Exists(lockFilePath)) + { + File.Delete(lockFilePath); + } + return Task.CompletedTask; + }); } [Test] - [Parallelizable] - public async Task LogLockUi() + public async Task LogLock() { // Request lock - using (var writer = new StreamWriter(lockFilePath)) - { - await writer.WriteLineAsync("REQUESTED"); - } - - await Task.Delay(1200); - - // Assure lock is granted after request - using (var reader = new StreamReader(lockFilePath)) + await ExecuteFileAction(async () => { - Assert.That((await reader.ReadToEndAsync()).Trim().EndsWith("GRANTED")); - } - - // Assure write is NOT possible after lock was granted - Task logWriter = Task.Run(() => - { - Log.WriteDebug("TEST_TILE", "TEST_TEXT"); + using (var writer = new StreamWriter(lockFilePath)) + { + await writer.WriteLineAsync("REQUESTED"); + } }); - await Task.Delay(500); - - Assert.That(logWriter.IsCompleted, Is.False); - - // Release lock - using (var writer = new StreamWriter(lockFilePath)) - { - await writer.WriteLineAsync("RELEASED"); - } - - await Task.Delay(1200); - - // Assure write IS possible after lock was released - Assert.That(logWriter.IsCompleted, Is.True); - - // Request lock - using (var writer = new StreamWriter(lockFilePath)) - { - await writer.WriteLineAsync("REQUESTED"); - } - - await Task.Delay(11_200); - - // If not release in time make sure that the lock will be forcefully released - using (var reader = new StreamReader(lockFilePath)) - { - Assert.That((await reader.ReadToEndAsync()).Trim().EndsWith("FORCEFULLY RELEASED")); - } - } - } + await Task.Delay(2000); + + // Assure lock is granted after request + await 
ExecuteFileAction(async () => + { + using (var reader = new StreamReader(lockFilePath)) + { + Assert.That((await reader.ReadToEndAsync()).Trim().EndsWith("GRANTED")); + } + }); + + // Assure write is NOT possible after lock was granted + Task logWriter = Task.Run(() => + { + Log.WriteDebug("TEST_TITLE", "TEST_TEXT"); + }); + + await Task.Delay(500); + + Assert.That(logWriter.IsCompleted, Is.False); + + // Release lock + await ExecuteFileAction(async () => + { + using (var writer = new StreamWriter(lockFilePath)) + { + await writer.WriteLineAsync("RELEASED"); + } + }); + + await Task.Delay(2000); + + // Assure write IS possible after lock was released + Assert.That(logWriter.IsCompletedSuccessfully, Is.True); + + // Request lock + await ExecuteFileAction(async () => + { + using (var writer = new StreamWriter(lockFilePath)) + { + await writer.WriteLineAsync("REQUESTED"); + } + }); + + await Task.Delay(12_000); + + // If not release in time make sure that the lock will be forcefully released + await ExecuteFileAction(async () => + { + using (var reader = new StreamReader(lockFilePath)) + { + Assert.That((await reader.ReadToEndAsync()).Trim().EndsWith("FORCEFULLY RELEASED")); + } + }); + } + + private static async Task ExecuteFileAction(Func action) + { + bool success = false; + int maxRetryAttempts = 50; + int retryCount = 0; + + // Handle IO Exception like file blocking from another process by retrying with a random delay + while (!success && retryCount < maxRetryAttempts) + { + try + { + await action(); + success = true; + } + catch (IOException) + { + retryCount++; + } + await Task.Delay(random.Next(50, 100)); + } + + if (!success) + { + Assert.Fail($"Lock file access failed after {maxRetryAttempts} retries."); + } + } + } } diff --git a/roles/test/files/FWO.Test/SimulatedApiConnection.cs b/roles/test/files/FWO.Test/SimulatedApiConnection.cs index 502be00fa..8bc699ff8 100644 --- a/roles/test/files/FWO.Test/SimulatedApiConnection.cs +++ b/roles/test/files/FWO.Test/SimulatedApiConnection.cs @@ -9,7 +9,7 @@ namespace FWO.Test { internal class SimulatedApiConnection : ApiConnection { - public override ApiSubscription GetSubscription(Action exceptionHandler, ApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null) + public override GraphQlApiSubscription GetSubscription(Action exceptionHandler, GraphQlApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? 
operationName = null) { throw new NotImplementedException(); } @@ -28,5 +28,20 @@ public override void SetRole(string role) { throw new NotImplementedException(); } + + public override void SetProperRole(System.Security.Claims.ClaimsPrincipal user, List targetRoleList) + { + throw new NotImplementedException(); + } + + public override void SwitchBack() + { + throw new NotImplementedException(); + } + + protected override void Dispose(bool disposing) + { + throw new NotImplementedException(); + } } } diff --git a/roles/test/files/FWO.Test/SimulatedUserConfig.cs b/roles/test/files/FWO.Test/SimulatedUserConfig.cs index 7d748663d..16438aa6d 100644 --- a/roles/test/files/FWO.Test/SimulatedUserConfig.cs +++ b/roles/test/files/FWO.Test/SimulatedUserConfig.cs @@ -9,6 +9,7 @@ internal class SimulatedUserConfig : UserConfig {"Rules","Rules Report"}, {"ResolvedRules","Rules Report (resolved)"}, {"ResolvedRulesTech","Rules Report (technical)"}, + {"UnusedRules","Unused Rules Report"}, {"Recertification","Recertification Report"}, {"NatRules","NAT Rules Report"}, {"Changes","Changes Report"}, diff --git a/roles/test/tasks/main.yml b/roles/test/tasks/main.yml index 61c01d7e6..b59298d5c 100644 --- a/roles/test/tasks/main.yml +++ b/roles/test/tasks/main.yml @@ -62,6 +62,7 @@ - name: auth testing import_tasks: test-auth.yml + when: "not run_on_github|bool" - name: api testing import_tasks: test-api.yml diff --git a/roles/test/tasks/test-csharp.yml b/roles/test/tasks/test-csharp.yml index e888b8a34..5a7e36987 100644 --- a/roles/test/tasks/test-csharp.yml +++ b/roles/test/tasks/test-csharp.yml @@ -17,6 +17,7 @@ ignore_errors: false environment: "{{ proxy_env }}" -- name: show test results +- name: show csharp test results in case of errors debug: var: csharp_tests + when: csharp_tests.rc != 0 diff --git a/roles/ui/files/FWO.UI/Auth/AuthStateProvider.cs b/roles/ui/files/FWO.UI/Auth/AuthStateProvider.cs index c3f41fe40..95edf0aeb 100644 --- a/roles/ui/files/FWO.UI/Auth/AuthStateProvider.cs +++ b/roles/ui/files/FWO.UI/Auth/AuthStateProvider.cs @@ -12,108 +12,97 @@ using FWO.Logging; using Microsoft.AspNetCore.Components.Server.ProtectedBrowserStorage; using System.Security.Authentication; +using System.Security.Principal; namespace FWO.Ui.Auth { public class AuthStateProvider : AuthenticationStateProvider { - private ClaimsPrincipal? authenticatedUser; + private ClaimsPrincipal user = new ClaimsPrincipal(new ClaimsIdentity()); public override Task GetAuthenticationStateAsync() { - var identity = new ClaimsIdentity(); - var user = new ClaimsPrincipal(identity); return Task.FromResult(new AuthenticationState(user)); } - public async Task AuthenticateUser(string jwtString, UserConfig userConfig, ApiConnection apiConnection, CircuitHandlerService circuitHandler) + public async Task> Authenticate(string username, string password, ApiConnection apiConnection, MiddlewareClient middlewareClient, + UserConfig userConfig, ProtectedSessionStorage sessionStorage, CircuitHandlerService circuitHandler) + { + // There is no jwt in session storage. Get one from auth module. + AuthenticationTokenGetParameters authenticationParameters = new AuthenticationTokenGetParameters { Username = username, Password = password }; + RestResponse apiAuthResponse = await middlewareClient.AuthenticateUser(authenticationParameters); + + if (apiAuthResponse.StatusCode == HttpStatusCode.OK) + { + string jwtString = apiAuthResponse.Data ?? 
throw new Exception("no response data"); + await Authenticate(jwtString, apiConnection, middlewareClient, userConfig, circuitHandler, sessionStorage); + Log.WriteAudit("AuthenticateUser", $"user {username} successfully authenticated"); + } + + return apiAuthResponse; + } + + public async Task Authenticate(string jwtString, ApiConnection apiConnection, MiddlewareClient middlewareClient, + UserConfig userConfig, CircuitHandlerService circuitHandler, ProtectedSessionStorage sessionStorage) { - JwtReader jwt = new JwtReader(jwtString); + // Try to auth with jwt (validates it and creates user context on UI side). + JwtReader jwtReader = new JwtReader(jwtString); - if (jwt.Validate()) + if (jwtReader.Validate()) { + // importer is not allowed to login + if (jwtReader.ContainsRole("importer")) + { + throw new AuthenticationException("login_importer_error"); + } + + // Save jwt in session storage. + await sessionStorage.SetAsync("jwt", jwtString); + + // Tell api connection to use jwt as authentication + apiConnection.SetAuthHeader(jwtString); + + // Tell middleware connection to use jwt as authentication + middlewareClient.SetAuthenticationToken(jwtString); + + // Add jwt expiry timer + JwtEventService.AddJwtTimers(userConfig.User.Dn, (int)jwtReader.TimeUntilExpiry().TotalMilliseconds, 1000 * 60 * userConfig.SessionTimeoutNoticePeriod); + + // Set user claims based on the jwt claims ClaimsIdentity identity = new ClaimsIdentity ( - claims: jwt.GetClaims(), + claims: jwtReader.GetClaims(), authenticationType: "ldap", nameType: JwtRegisteredClaimNames.UniqueName, roleType: "role" ); - authenticatedUser = new ClaimsPrincipal(identity); + user = new ClaimsPrincipal(identity); - await userConfig.SetUserInformation(authenticatedUser.FindFirstValue("x-hasura-uuid"), apiConnection); + await userConfig.SetUserInformation(user.FindFirstValue("x-hasura-uuid"), apiConnection); circuitHandler.User = userConfig.User; userConfig.User.Jwt = jwtString; - if(!userConfig.User.PasswordMustBeChanged) + if (!userConfig.User.PasswordMustBeChanged) { - NotifyAuthenticationStateChanged(Task.FromResult(new AuthenticationState(authenticatedUser))); + NotifyAuthenticationStateChanged(Task.FromResult(new AuthenticationState(user))); } } - else { Deauthenticate(); - } - } - - public async Task> Login(string username, string password, ApiConnection apiConnection, MiddlewareClient middlewareClient, - UserConfig userConfig, ProtectedSessionStorage sessionStorage, CircuitHandlerService circuitHandler) - { - // There is no jwt in session storage. Get one from auth module. - AuthenticationTokenGetParameters authenticationParameters = new AuthenticationTokenGetParameters { Username = username, Password = password }; - RestResponse apiAuthResponse = await middlewareClient.AuthenticateUser(authenticationParameters); - - if (apiAuthResponse.StatusCode == HttpStatusCode.OK) - { - string jwt = apiAuthResponse.Data ?? throw new Exception("no response data"); - JwtReader reader = new JwtReader(jwt); - reader.Validate(); - - // importer is not allowed to login - if (reader.ContainsRole("importer")) - { - throw new AuthenticationException("login_importer_error"); - } - - Log.WriteAudit("AuthenticateUser", $"user {username} successfully authenticated"); - - // Save it in session storage. - await sessionStorage.SetAsync("jwt", jwt); - - // Add all user relevant information to the current session. Also used when reloading page. 
- await CreateUserContext(jwt, apiConnection, middlewareClient, userConfig, circuitHandler); - - // Add jwt expiry timer - JwtEventService.AddJwtTimers(userConfig.User.Dn, (int)reader.TimeUntilExpiry().TotalMilliseconds, 1000 * 60 * userConfig.SessionTimeoutNoticePeriod); } - return apiAuthResponse; } public void Deauthenticate() - { - ClaimsIdentity identity = new ClaimsIdentity(); - ClaimsPrincipal emptyUser = new ClaimsPrincipal(identity); - - NotifyAuthenticationStateChanged(Task.FromResult(new AuthenticationState(emptyUser))); - } - - public async Task CreateUserContext(string jwt, ApiConnection apiConnection, MiddlewareClient middlewareClient, UserConfig userConfig, CircuitHandlerService circuitHandler) { - // Tell api connection to use jwt as authentication - apiConnection.SetAuthHeader(jwt); - - // Tell middleware connection to use jwt as authentication - middlewareClient.SetAuthenticationToken(jwt); - - // Try to auth with jwt (validates it and creates user context on UI side). - await AuthenticateUser(jwt, userConfig, apiConnection, circuitHandler); + user = new ClaimsPrincipal(new ClaimsIdentity()); + NotifyAuthenticationStateChanged(Task.FromResult(new AuthenticationState(user))); } public void ConfirmPasswordChanged() { - NotifyAuthenticationStateChanged(Task.FromResult(new AuthenticationState(authenticatedUser ?? throw new Exception("Password cannot be changed because user was not authenticated")))); + NotifyAuthenticationStateChanged(Task.FromResult(new AuthenticationState(user ?? throw new Exception("Password cannot be changed because user was not authenticated")))); } } } diff --git a/roles/ui/files/FWO.UI/Pages/Certification.razor b/roles/ui/files/FWO.UI/Pages/Certification.razor index cc688ee04..dff06d8ac 100644 --- a/roles/ui/files/FWO.UI/Pages/Certification.razor +++ b/roles/ui/files/FWO.UI/Pages/Certification.razor @@ -9,6 +9,7 @@ @using FWO.Middleware.Client @page "/certification" +@attribute [Authorize(Roles = "admin, recertifier, auditor")] @inject ApiConnection apiConnection @inject UserConfig userConfig @@ -20,11 +21,11 @@
    @if (processing == false) { - + } else { - + }
    @@ -73,22 +74,22 @@ @if(rulesFound) { - + } else { - + } - + }
    + @bind-SelectedRules="selectedRules" SelectedReportType="ReportType.Recertification"/> @@ -107,8 +108,8 @@
    - - + +
    @@ -172,19 +173,7 @@ try { - if(authenticationStateTask!.Result.User.IsInRole("recertifier")) - { - apiConnection.SetRole("recertifier"); - } - else if(authenticationStateTask!.Result.User.IsInRole("admin")) - { - apiConnection.SetRole("admin"); - } - else - { - apiConnection.SetRole("auditor"); - } - + apiConnection.SetProperRole(authenticationStateTask!.Result.User, new List { "recertifier", "admin", "auditor"}); rulesFound = false; ticketCreator = new TicketCreator(apiConnection, userConfig); @@ -322,7 +311,7 @@ RecertSingleLinePerRule = recertSingleLinePerRule, RecertificationDisplayPeriod = recertLookAheadDays }; - reportParams = new ReportTemplate("", deviceFilter, (int) ReportType.Recertification, new TimeFilter(), recertFilter); + reportParams = new ReportTemplate("", deviceFilter, (int) ReportType.Recertification, new TimeFilter(), recertFilter, null); currentReport = ReportBase.ConstructReport(reportParams, userConfig); } @@ -380,9 +369,12 @@ await DoRecerts(); // create delete tickets - foreach(var device in deleteList) + if(userConfig.RecAutoCreateDeleteTicket) { - await ticketCreator.CreateRuleDeleteTicket(device.Key, device.Value, actComment, DateTime.Now.AddDays(userConfig.RuleRemovalGracePeriod)); + foreach(var device in deleteList) + { + await ticketCreator.CreateDecertRuleDeleteTicket(device.Key, device.Value, actComment, DateTime.Now.AddDays(userConfig.RuleRemovalGracePeriod)); + } } // reload updated report @@ -407,7 +399,6 @@ { foreach (Rule rule in device.Rules) { - // await InitRecert(rule); // just for test if(rule.Metadata.Recert || rule.Metadata.ToBeRemoved) { rule.DeviceId = device.Id; diff --git a/roles/ui/files/FWO.UI/Pages/Compliance/IpAddressInput.razor b/roles/ui/files/FWO.UI/Pages/Compliance/IpAddressInput.razor new file mode 100644 index 000000000..fee3305dc --- /dev/null +++ b/roles/ui/files/FWO.UI/Pages/Compliance/IpAddressInput.razor @@ -0,0 +1,44 @@ +@using NetTools; + +@inject UserConfig userConfig + + + +@code +{ + private string ipAddressInputClass = ""; + + private IPAddressRange? ipRange; + + [Parameter] + public IPAddressRange? IpRange + { + get => ipRange; + set + { + if (ipRange != value) + { + ipRange = value; + IpRangeChanged.InvokeAsync(ipRange); + } + } + } + + [Parameter] + public EventCallback IpRangeChanged { get; set; } + + private void TryParseIpRange(string ipAddressInput) + { + if (IPAddressRange.TryParse(ipAddressInput, out IPAddressRange parsedIpRange)) + { + IpRange = parsedIpRange; + ipAddressInputClass = "is-valid"; + } + else + { + IpRange = null; + ipAddressInputClass = "is-invalid"; + } + } +} \ No newline at end of file diff --git a/roles/ui/files/FWO.UI/Pages/Compliance/ZoneTable.razor b/roles/ui/files/FWO.UI/Pages/Compliance/ZoneTable.razor new file mode 100644 index 000000000..131381d8d --- /dev/null +++ b/roles/ui/files/FWO.UI/Pages/Compliance/ZoneTable.razor @@ -0,0 +1,68 @@ +@using NetTools; + +@inject UserConfig userConfig +@inject NetworkZoneService networkZoneService + + + + + + + + + + + + + + + + + + + + +
    @(userConfig.GetText("subzones")):
    + @if (networkZone.Subzones.Length > 0) + { + ComplianceNetworkZone zone = networkZone; + + } + else + { + @(userConfig.GetText("None")) + } +
    +
    + +@code +{ + [Parameter] + public ComplianceNetworkZone? Superzone { get; set; } = null; + + [Parameter] + public ComplianceNetworkZone[] NetworkZones { get; set; } = new ComplianceNetworkZone[0]; +} diff --git a/roles/ui/files/FWO.UI/Pages/Compliance/ZonesChecks.razor b/roles/ui/files/FWO.UI/Pages/Compliance/ZonesChecks.razor new file mode 100644 index 000000000..f6f217b1f --- /dev/null +++ b/roles/ui/files/FWO.UI/Pages/Compliance/ZonesChecks.razor @@ -0,0 +1,206 @@ +@using NetTools; + +@page "/compliance/zones/checks" + +@layout ComplianceLayout + +@inject UserConfig userConfig +@inject NetworkZoneService networkZoneService + +
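The `TryParseIpRange` handler in the new IpAddressInput.razor above delegates all input validation to `IPAddressRange.TryParse` from the NetTools package. A minimal standalone sketch of that parse-and-validate step, assuming the NetTools/IPAddressRange NuGet package is referenced; the sample inputs and class name are illustrative and not part of the change set:

```csharp
// Standalone sketch of the parse-and-validate step used by IpAddressInput.razor,
// based on the NetTools IPAddressRange package (sample inputs are illustrative).
using System;
using NetTools;

class IpRangeParseDemo
{
    static void Main()
    {
        // Single IPs, CIDR blocks and begin-end ranges all parse to the same type.
        foreach (string input in new[] { "10.0.6.1", "10.0.0.0/24", "1.2.3.4-1.2.3.5", "not-an-ip" })
        {
            if (IPAddressRange.TryParse(input, out IPAddressRange range))
            {
                Console.WriteLine($"{input} -> valid: {range.Begin} - {range.End}");
            }
            else
            {
                Console.WriteLine($"{input} -> invalid"); // the component would mark this input "is-invalid"
            }
        }
    }
}
```

Because every accepted notation ends up as one `IPAddressRange`, the component can hand a single `IpRange` value back to its parent regardless of how the address was typed.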

    @userConfig.GetText("network_zone_check")

    + +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    +
    + +
    +
    + @if (displayOutput) + { + + @if (!compliant) + { +
    +
    + @(userConfig.GetText("rule_violations")) +
    +
    + + + @foreach ((ComplianceNetworkZone, ComplianceNetworkZone) forbiddenCommunication in forbiddenCommunicationsOutput) + { + + + + + + } + +
    @(forbiddenCommunication.Item1.Name) @(forbiddenCommunication.Item2.Name)
    +@*
      + @foreach ((ComplianceNetworkZone, ComplianceNetworkZone) forbiddenCommunication in forbiddenCommunicationsOutput) + { +
    • @(forbiddenCommunication.Item1.Name) → @(forbiddenCommunication.Item2.Name)
    • + } +
    *@ +
    +
    + } + } +
    +
+
+
+@code
+{
+    IPAddressRange? sourceIpRange;
+    IPAddressRange? destinationIpRange;
+    bool displayOutput = false;
+    bool compliant = false;
+    List<(ComplianceNetworkZone, ComplianceNetworkZone)> forbiddenCommunicationsOutput = new List<(ComplianceNetworkZone, ComplianceNetworkZone)>();
+
+    private void CheckIpRangeInputCompliance()
+    {
+        displayOutput = false;
+        if (sourceIpRange != null && destinationIpRange != null)
+        {
+            displayOutput = true;
+            compliant = CheckCompliance
+            (
+                new List<IPAddressRange>() { sourceIpRange },
+                new List<IPAddressRange>() { destinationIpRange },
+                out forbiddenCommunicationsOutput
+            );
+            // output = $"{userConfig.GetText("rule_conform")}: {Config.ShowBool(compliant)}. \n";
+            // if (!compliant)
+            // {
+            //     output += $"{userConfig.GetText("rule_violations")}: {string.Join(", ", forbiddenCommunicationOutput.ConvertAll(x => $"{x.Item1.Name} \u2192 {x.Item2.Name}"))}";
+            // }
+        }
+    }
+
+    private bool CheckRuleCompliance(Rule rule, out List<(ComplianceNetworkZone, ComplianceNetworkZone)> forbiddenCommunication)
+    {
+        List<IPAddressRange> froms = new List<IPAddressRange>();
+        List<IPAddressRange> tos = new List<IPAddressRange>();
+
+        foreach (NetworkLocation networkLocation in rule.Froms)
+        {
+            // Determine all source ip ranges
+            froms.AddRange(ParseIpRange(networkLocation.Object));
+        }
+        foreach (NetworkLocation networkLocation in rule.Tos)
+        {
+            // Determine all destination ip ranges
+            tos.AddRange(ParseIpRange(networkLocation.Object));
+        }
+
+        return CheckCompliance(froms, tos, out forbiddenCommunication);
+    }
+
+    private bool CheckCompliance(List<IPAddressRange> source, List<IPAddressRange> destination, out List<(ComplianceNetworkZone, ComplianceNetworkZone)> forbiddenCommunication)
+    {
+        // Determine all matching source zones
+        List<ComplianceNetworkZone> sourceZones = DetermineZones(source);
+
+        // Determine all matching destination zones
+        List<ComplianceNetworkZone> destinationZones = DetermineZones(destination);
+
+        forbiddenCommunication = new List<(ComplianceNetworkZone, ComplianceNetworkZone)>();
+
+        foreach (ComplianceNetworkZone sourceZone in sourceZones)
+        {
+            foreach (ComplianceNetworkZone destinationZone in destinationZones)
+            {
+                if (!sourceZone.CommunicationAllowedTo(destinationZone))
+                {
+                    forbiddenCommunication.Add((sourceZone, destinationZone));
+                }
+            }
+        }
+
+        return forbiddenCommunication.Count == 0;
+    }
+
+
+    private List<ComplianceNetworkZone> DetermineZones(List<IPAddressRange> ranges)
+    {
+        List<ComplianceNetworkZone> result = new List<ComplianceNetworkZone>();
+        List<List<IPAddressRange>> unseenIpAddressRanges = new List<List<IPAddressRange>>();
+
+        for (int i = 0; i < ranges.Count; i++)
+        {
+            unseenIpAddressRanges.Add(new List<IPAddressRange>()
+            {
+                new IPAddressRange(ranges[i].Begin, ranges[i].End)
+            });
+        }
+
+        foreach (ComplianceNetworkZone zone in networkZoneService.NetworkZones)
+        {
+            if (zone.OverlapExists(ranges, unseenIpAddressRanges))
+            {
+                result.Add(zone);
+            }
+        }
+
+        // Get ip ranges that are not in any zone
+        List<IPAddressRange> undefinedIpRanges = unseenIpAddressRanges.SelectMany(x => x).ToList();
+        if (undefinedIpRanges.Count() > 0)
+        {
+            result.Add
+            (
+                new ComplianceNetworkZone()
+                {
+                    Name = userConfig.GetText("internet_local_zone"),
+                }
+            );
+        }
+
+        return result;
+    }
+
+    private List<IPAddressRange> ParseIpRange(NetworkObject networkObject)
+    {
+        List<IPAddressRange> ranges = new List<IPAddressRange>();
+
+        if (networkObject.Type == new NetworkObjectType() { Name = "range" })
+        {
+            ranges.Add(IPAddressRange.Parse($"{networkObject.IP}-{networkObject.IpEnd}"));
+        }
+        else if (networkObject.Type != new NetworkObjectType() { Name = "group" })
+        {
+            for (int j = 0; j < networkObject.ObjectGroupFlats.Length; j++)
+            {
+                if (networkObject.ObjectGroupFlats[j].Object != null)
+                {
+                    ranges.AddRange(ParseIpRange(networkObject.ObjectGroupFlats[j].Object!));
+                }
+            }
+        }
+        else
+        {
+            // CIDR
notation or single (host) IP can be parsed directly + ranges.Add(IPAddressRange.Parse(networkObject.IP)); + } + + return ranges; + } +} diff --git a/roles/ui/files/FWO.UI/Pages/Compliance/ZonesConfiguration.razor b/roles/ui/files/FWO.UI/Pages/Compliance/ZonesConfiguration.razor new file mode 100644 index 000000000..9127dfc0c --- /dev/null +++ b/roles/ui/files/FWO.UI/Pages/Compliance/ZonesConfiguration.razor @@ -0,0 +1,434 @@ +@using NetTools; +@using System.Diagnostics; + +@page "/compliance/zones/configuration" + +@layout ComplianceLayout + +@inject ApiConnection apiConnection +@inject UserConfig userConfig +@inject NetworkZoneService networkZoneService + +
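The compliance check added in ZonesChecks.razor above boils down to two steps: map each source and destination IP range to the network zones it overlaps, then flag every source/destination zone pair whose communication is not allowed. A self-contained sketch of that flow; the `Zone` type below is only a stand-in for `ComplianceNetworkZone` (whose `OverlapExists`/`CommunicationAllowedTo` internals are not part of this diff), and the zone ranges and policy are sample data:

```csharp
// Self-contained sketch of the zone compliance check from ZonesChecks.razor.
// "Zone" is a stand-in for ComplianceNetworkZone; ranges and policy are sample data.
using System;
using System.Collections.Generic;
using System.Linq;
using NetTools;

class Zone
{
    public string Name = "";
    public List<IPAddressRange> Ranges = new();
    public HashSet<string> AllowedDestinations = new(); // zone names this zone may talk to

    public bool Overlaps(IPAddressRange range) =>
        Ranges.Any(r => r.Contains(range.Begin) || r.Contains(range.End) || range.Contains(r.Begin));

    public bool CommunicationAllowedTo(Zone other) => AllowedDestinations.Contains(other.Name);
}

class ZoneCheckDemo
{
    static List<Zone> DetermineZones(IPAddressRange range, List<Zone> zones) =>
        zones.Where(z => z.Overlaps(range)).ToList();

    static void Main()
    {
        var dmz = new Zone { Name = "DMZ", Ranges = { IPAddressRange.Parse("10.1.0.0/24") }, AllowedDestinations = { "Internal" } };
        var internalZone = new Zone { Name = "Internal", Ranges = { IPAddressRange.Parse("10.2.0.0/24") } };
        var zones = new List<Zone> { dmz, internalZone };

        var source = IPAddressRange.Parse("10.2.0.5");        // lies in "Internal"
        var destination = IPAddressRange.Parse("10.1.0.10");  // lies in "DMZ"

        var forbidden = new List<(Zone, Zone)>();
        foreach (Zone src in DetermineZones(source, zones))
            foreach (Zone dst in DetermineZones(destination, zones))
                if (!src.CommunicationAllowedTo(dst))
                    forbidden.Add((src, dst));

        // Internal -> DMZ is not allowed by the sample policy, so one violation is reported.
        Console.WriteLine(forbidden.Count == 0
            ? "compliant"
            : string.Join(", ", forbidden.Select(p => $"{p.Item1.Name} -> {p.Item2.Name}")));
    }
}
```

Unlike this sketch, `DetermineZones` in the page also keeps track of ranges that fall into no configured zone and reports them under the `internet_local_zone` label.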

    @userConfig.GetText("network_zone_config")

    + + + + + + + + +@*Show Top-Level Zones*@ + + +@if (networkZoneInEdit != null) +{ + + +
    +
    +
    + + +
    +
    + + +
    +
    + + +
    - +
    } @@ -100,7 +100,7 @@
    - +
    } @@ -124,13 +124,13 @@
    - + - + - +
    diff --git a/roles/ui/files/FWO.UI/Pages/Settings/SettingsCustomizing.razor b/roles/ui/files/FWO.UI/Pages/Settings/SettingsCustomizing.razor index f61b6b789..15e9716ed 100644 --- a/roles/ui/files/FWO.UI/Pages/Settings/SettingsCustomizing.razor +++ b/roles/ui/files/FWO.UI/Pages/Settings/SettingsCustomizing.razor @@ -43,12 +43,12 @@ @@ -81,10 +81,10 @@
    - + - +

    diff --git a/roles/ui/files/FWO.UI/Pages/Settings/SettingsDefaults.razor b/roles/ui/files/FWO.UI/Pages/Settings/SettingsDefaults.razor index 9e090e722..64e40003d 100644 --- a/roles/ui/files/FWO.UI/Pages/Settings/SettingsDefaults.razor +++ b/roles/ui/files/FWO.UI/Pages/Settings/SettingsDefaults.razor @@ -28,13 +28,13 @@
    - +
    - +
    @*
    @@ -46,14 +46,14 @@
    - +

    - +
    @@ -67,57 +67,45 @@
    @if (configData!.AutoFillRightSidebar) { - + } else { - + }
    - +

    - -
    - -
    -
    -
    -
    - +
    - +
    - +
    - +
    +
    - -
    - -
    -
    -
    - +
    - +

    - +
    @@ -136,13 +124,13 @@
    - +
    - +

    @@ -155,10 +143,10 @@
    - + - +

    diff --git a/roles/ui/files/FWO.UI/Pages/Settings/SettingsEmail.razor b/roles/ui/files/FWO.UI/Pages/Settings/SettingsEmail.razor index a726b3fc9..345fdc9e3 100644 --- a/roles/ui/files/FWO.UI/Pages/Settings/SettingsEmail.razor +++ b/roles/ui/files/FWO.UI/Pages/Settings/SettingsEmail.razor @@ -69,12 +69,12 @@
    - - + + - - + +
    diff --git a/roles/ui/files/FWO.UI/Pages/Settings/SettingsGateways.razor b/roles/ui/files/FWO.UI/Pages/Settings/SettingsGateways.razor index f37003af0..880c8cef8 100644 --- a/roles/ui/files/FWO.UI/Pages/Settings/SettingsGateways.razor +++ b/roles/ui/files/FWO.UI/Pages/Settings/SettingsGateways.razor @@ -13,18 +13,18 @@ @(userConfig.GetText("U5112"))
    - +
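Beyond the UI changes, the reworked LockTest.cs earlier in this diff wraps every access to the shared lock file in an `ExecuteFileAction` helper that retries on `IOException`, so a file temporarily held by the background lock process no longer fails the test run. A standalone sketch of that retry pattern, with the NUnit assertion replaced by a plain exception so it is usable outside the test project; names and limits here are illustrative:

```csharp
// Minimal standalone version of the retry-on-IOException pattern used by
// LockTest.ExecuteFileAction; names and limits here are illustrative.
using System;
using System.IO;
using System.Threading.Tasks;

static class RetryingFileAccess
{
    private static readonly Random random = new Random();

    public static async Task ExecuteFileAction(Func<Task> action, int maxRetryAttempts = 50)
    {
        for (int attempt = 0; attempt < maxRetryAttempts; attempt++)
        {
            try
            {
                await action();
                return;
            }
            catch (IOException)
            {
                // File is temporarily locked by another process: back off briefly and retry.
                await Task.Delay(random.Next(50, 100));
            }
        }
        throw new IOException($"File access failed after {maxRetryAttempts} retries.");
    }

    // Example: append a line to a file that may be locked intermittently.
    public static Task AppendLineWithRetry(string path, string line) =>
        ExecuteFileAction(async () =>
        {
            using var writer = new StreamWriter(path, append: true);
            await writer.WriteLineAsync(line);
        });
}
```

Retrying with a short random delay keeps concurrent writers from colliding in lockstep, which is also why the test helper draws its delay from `random.Next(50, 100)`.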