');
@@ -170,10 +174,12 @@ INSERT INTO txt VALUES ('getting_started', 'German', 'Einstiegshilfe');
INSERT INTO txt VALUES ('getting_started', 'English', 'Quick start');
INSERT INTO txt VALUES ('getting_started_facts', 'German', '
Die folgenden Hauptmenüpunkte stehen (je nach Rollenzugehörigkeit) zur Verfügung:
-
Reporting: Erlaubt das Generieren verschiedener Reports
Einstellungen: Alle Einstellungen wie z.B. Sprache der Benutzeroberfläche oder
das Einbinden Ihrer eigenen Firewall-Systeme.
@@ -185,10 +191,12 @@ Die folgenden Hauptmenüpunkte stehen (je nach Rollenzugehörigkeit) zur
INSERT INTO txt VALUES ('getting_started_facts', 'English', '
The following top-level menu items are available (depending on role memberships):
-
Reporting: Ad-hoc generation of all available reports
Settings: All settings like e.g. language of the user interface or
integration of your own firewalls.
@@ -238,6 +246,10 @@ INSERT INTO txt VALUES ('report_type', 'German', 'Report-Typ');
INSERT INTO txt VALUES ('report_type', 'English', 'Report Type');
INSERT INTO txt VALUES ('report_time', 'German', 'Report-Zeit');
INSERT INTO txt VALUES ('report_time', 'English', 'Report Time');
+INSERT INTO txt VALUES ('unused_days', 'German', 'Unbenutzt seit (in Tagen)');
+INSERT INTO txt VALUES ('unused_days', 'English', 'Unused since (in days)');
+INSERT INTO txt VALUES ('generation', 'German', 'Generierung');
+INSERT INTO txt VALUES ('generation', 'English', 'Generation');
INSERT INTO txt VALUES ('change', 'German', 'Ändern');
INSERT INTO txt VALUES ('change', 'English', 'Change');
INSERT INTO txt VALUES ('shortcut', 'German', 'Abkürzung');
@@ -464,6 +476,14 @@ INSERT INTO txt VALUES ('generated_on', 'German', 'Erstellt am');
INSERT INTO txt VALUES ('generated_on', 'English', 'Generated on');
INSERT INTO txt VALUES ('date_of_config', 'German', 'Zeit der Konfiguration');
INSERT INTO txt VALUES ('date_of_config', 'English', 'Time of configuration');
+INSERT INTO txt VALUES ('create_delete_ticket', 'German', 'Löschantrag stellen');
+INSERT INTO txt VALUES ('create_delete_ticket', 'English', 'Create Delete Ticket');
+INSERT INTO txt VALUES ('rules_to_delete', 'German', 'Zu löschende Regeln');
+INSERT INTO txt VALUES ('rules_to_delete', 'English', 'Rules to delete');
+INSERT INTO txt VALUES ('delete_unused_rule', 'German', 'Unbenutzte Regel löschen');
+INSERT INTO txt VALUES ('delete_unused_rule', 'English', 'Delete unused rule');
+INSERT INTO txt VALUES ('delete_unused_rules', 'German', 'Unbenutzte Regeln löschen');
+INSERT INTO txt VALUES ('delete_unused_rules', 'English', 'Delete unused rules');
-- schedule
INSERT INTO txt VALUES ('schedule', 'German', 'Terminplan');
@@ -496,7 +516,7 @@ INSERT INTO txt VALUES ('Months', 'German', 'Monat(e)');
INSERT INTO txt VALUES ('Months', 'English', 'Month(s)');
INSERT INTO txt VALUES ('Years', 'German', 'Jahr(e)');
INSERT INTO txt VALUES ('Years', 'English', 'Year(s)');
-INSERT INTO txt VALUES ('schedule_fetch', 'German', 'Abholen der Termine');
+INSERT INTO txt VALUES ('schedule_fetch', 'German', 'Laden der Termine');
INSERT INTO txt VALUES ('schedule_fetch', 'English', 'Report Schedule Fetch');
INSERT INTO txt VALUES ('save_scheduled_report','German', 'Termin speichern');
INSERT INTO txt VALUES ('save_scheduled_report','English', 'Save scheduled report');
@@ -504,6 +524,10 @@ INSERT INTO txt VALUES ('edit_scheduled_report','German', 'Termin bearbeiten');
INSERT INTO txt VALUES ('edit_scheduled_report','English', 'Edit scheduled report');
INSERT INTO txt VALUES ('delete_scheduled_report','German', 'Termin löschen');
INSERT INTO txt VALUES ('delete_scheduled_report','English','Delete scheduled report');
+INSERT INTO txt VALUES ('schedule_tile', 'German', 'Terminplan');
+INSERT INTO txt VALUES ('schedule_tile', 'English', 'Report schedule');
+INSERT INTO txt VALUES ('schedule_upd_err_msg', 'German', 'Das Laden der terminierten Reports führte zu einem Fehler.');
+INSERT INTO txt VALUES ('schedule_upd_err_msg', 'English', 'Loading the scheduled reports resulted in an error.');
-- archive
INSERT INTO txt VALUES ('download', 'German', 'Herunterladen');
@@ -526,6 +550,10 @@ INSERT INTO txt VALUES ('fetch_report', 'German', 'Erstellten Report holen
INSERT INTO txt VALUES ('fetch_report', 'English', 'Fetch downloads of generated report');
INSERT INTO txt VALUES ('delete_report', 'German', 'Erstellten Report löschen');
INSERT INTO txt VALUES ('delete_report', 'English', 'Delete generated report');
+INSERT INTO txt VALUES ('archive_tile', 'German', 'Report Archivierung');
+INSERT INTO txt VALUES ('archive_tile', 'English', 'Report archiving');
+INSERT INTO txt VALUES ('archive_upd_err_msg', 'German', 'Das Laden der archivierten Reports führte zu einem Fehler.');
+INSERT INTO txt VALUES ('archive_upd_err_msg', 'English', 'Loading the archived reports resulted in an error.');
-- workflow
INSERT INTO txt VALUES ('request', 'German', 'Antrag');
@@ -822,6 +850,8 @@ INSERT INTO txt VALUES ('ResolvedChanges', 'German', 'Changes-Report (aufgel&
INSERT INTO txt VALUES ('ResolvedChanges', 'English', 'Changes Report (resolved)');
INSERT INTO txt VALUES ('ResolvedChangesTech', 'German', 'Changes-Report (technisch)');
INSERT INTO txt VALUES ('ResolvedChangesTech', 'English', 'Changes Report (technical)');
+INSERT INTO txt VALUES ('UnusedRules', 'German', 'Unbenutzte-Regel-Report');
+INSERT INTO txt VALUES ('UnusedRules', 'English', 'Unused Rules Report');
-- network analysis
INSERT INTO txt VALUES ('network_analysis', 'German', 'Netzanalyse');
@@ -870,6 +900,14 @@ INSERT INTO txt VALUES ('last_certify_date', 'German', 'Datum der letzten R
INSERT INTO txt VALUES ('last_certify_date', 'English', 'Last recertification date');
INSERT INTO txt VALUES ('marked_to_be_removed', 'German', 'Als zu löschen markiert');
INSERT INTO txt VALUES ('marked_to_be_removed', 'English', 'Marked to be removed');
+INSERT INTO txt VALUES ('recert_history', 'German', 'Rezertifizierungshistorie');
+INSERT INTO txt VALUES ('recert_history', 'English', 'Recertification history');
+INSERT INTO txt VALUES ('recertified_by', 'German', 'rezertifiziert von');
+INSERT INTO txt VALUES ('recertified_by', 'English', 'recertified by');
+INSERT INTO txt VALUES ('decertified_by', 'German', 'dezertifiziert von');
+INSERT INTO txt VALUES ('decertified_by', 'English', 'decertified by');
+INSERT INTO txt VALUES ('as_owner', 'German', 'als Eigentümer');
+INSERT INTO txt VALUES ('as_owner', 'English', 'as Owner');
INSERT INTO txt VALUES ('decert_date', 'German', 'Dezertifizierungsdatum');
INSERT INTO txt VALUES ('decert_date', 'English', 'Decertification date');
INSERT INTO txt VALUES ('recert_comment', 'German', 'Zertifizierungskommentar');
@@ -921,6 +959,8 @@ INSERT INTO txt VALUES ('password_policy', 'German', 'Passworteinstellunge
INSERT INTO txt VALUES ('password_policy', 'English', 'Password Policy');
INSERT INTO txt VALUES ('email_settings', 'German', 'Email-Einstellungen');
INSERT INTO txt VALUES ('email_settings', 'English', 'Email settings');
+INSERT INTO txt VALUES ('importer_settings', 'German', 'Importer-Einstellungen');
+INSERT INTO txt VALUES ('importer_settings', 'English', 'Importer settings');
INSERT INTO txt VALUES ('edit_email', 'German', 'Email-Einstellungen editieren');
INSERT INTO txt VALUES ('edit_email', 'English', 'Edit email settings');
INSERT INTO txt VALUES ('email_sender', 'German', 'Email-Absendeadresse');
@@ -1169,6 +1209,10 @@ INSERT INTO txt VALUES ('maxMessages', 'German', 'Max Anzahl Nachricht
INSERT INTO txt VALUES ('maxMessages', 'English', 'Max number of messages');
INSERT INTO txt VALUES ('messageViewTime', 'German', 'Nachrichten-Anzeigedauer (in Sekunden)');
INSERT INTO txt VALUES ('messageViewTime', 'English', 'Message view time (in seconds)');
+INSERT INTO txt VALUES ('unusedTolerance', 'German', 'Als unbenutzt gewertet nach (in Tagen)');
+INSERT INTO txt VALUES ('unusedTolerance', 'English', 'Regarded as unused after (in days)');
+INSERT INTO txt VALUES ('creationTolerance', 'German', 'Toleranz ab Erzeugungsdatum (in Tagen)');
+INSERT INTO txt VALUES ('creationTolerance', 'English', 'Tolerance from creation date (in days)');
INSERT INTO txt VALUES ('dataRetentionTime', 'German', 'Datenaufbewahrungszeit (in Tagen)');
INSERT INTO txt VALUES ('dataRetentionTime', 'English', 'Data retention time (in days)');
INSERT INTO txt VALUES ('dailyCheckStartAt', 'German', 'Startzeit täglicher Check');
@@ -1185,6 +1229,14 @@ INSERT INTO txt VALUES ('importSuppressCertificateWarnings', 'German', 'Ze
INSERT INTO txt VALUES ('importSuppressCertificateWarnings', 'English', 'Suppress certificate warnings');
INSERT INTO txt VALUES ('fwApiElementsPerFetch','German', 'FW API - Pro Abruf geholte Elemente');
INSERT INTO txt VALUES ('fwApiElementsPerFetch','English', 'FW API - Elements per fetch');
+INSERT INTO txt VALUES ('impChangeNotifyRecipients','German', 'Empfänger-Email-Adressen für Änderungen');
+INSERT INTO txt VALUES ('impChangeNotifyRecipients','English', 'Recipient email addresses for change notifications');
+INSERT INTO txt VALUES ('impChangeNotifySubject', 'German', 'Titel der Änderungsbenachrichtigung');
+INSERT INTO txt VALUES ('impChangeNotifySubject', 'English', 'Subject of change notification emails');
+INSERT INTO txt VALUES ('impChangeNotifyBody', 'German', 'Text der Änderungsbenachrichtigung');
+INSERT INTO txt VALUES ('impChangeNotifyBody', 'English', 'Body of change notification emails');
+INSERT INTO txt VALUES ('impChangeNotifyActive', 'German', 'Änderungsbenachrichtigung aktiv?');
+INSERT INTO txt VALUES ('impChangeNotifyActive', 'English', 'Change notification active?');
INSERT INTO txt VALUES ('autoDiscoverSleepTime','German', 'Autodiscover-Intervall (in Stunden)');
INSERT INTO txt VALUES ('autoDiscoverSleepTime','English', 'Auto-discovery sleep time (in hours)');
INSERT INTO txt VALUES ('autoDiscoverStartAt', 'German', 'Autodiscover-Start');
@@ -1610,7 +1662,56 @@ INSERT INTO txt VALUES ('configuration', 'German', 'Konfiguration');
INSERT INTO txt VALUES ('configuration', 'English', 'Configuration');
INSERT INTO txt VALUES ('owner_import', 'German', 'Eigentümer-Import');
INSERT INTO txt VALUES ('owner_import', 'English', 'Owner Import');
-
+-- compliance
+INSERT INTO txt VALUES ('compliance', 'German', 'Compliance');
+INSERT INTO txt VALUES ('compliance', 'English', 'Compliance');
+INSERT INTO txt VALUES ('network_zones', 'German', 'Netzwerkzonen');
+INSERT INTO txt VALUES ('network_zones', 'English', 'Network zones');
+INSERT INTO txt VALUES ('matrix', 'German', 'Matrix');
+INSERT INTO txt VALUES ('matrix', 'English', 'Matrix');
+INSERT INTO txt VALUES ('checks', 'German', 'Überprüfung');
+INSERT INTO txt VALUES ('checks', 'English', 'Checks');
+INSERT INTO txt VALUES ('check', 'German', 'Überprüfen');
+INSERT INTO txt VALUES ('check', 'English', 'Check');
+INSERT INTO txt VALUES ('zone_comm_matrix', 'German', 'Netzwerkzonen-Kommunikationsmatrix');
+INSERT INTO txt VALUES ('zone_comm_matrix', 'English', 'Network zone communication matrix');
+INSERT INTO txt VALUES ('network_zone_config', 'German', 'Netzwerkzonen-Konfiguration');
+INSERT INTO txt VALUES ('network_zone_config', 'English', 'Network zone configuration');
+INSERT INTO txt VALUES ('network_zone_check', 'German', 'Netzwerkzonen-Complianceprüfung');
+INSERT INTO txt VALUES ('network_zone_check', 'English', 'Network zone compliance check');
+INSERT INTO txt VALUES ('allowed_communication','German', 'Erlaubte Kommunikation');
+INSERT INTO txt VALUES ('allowed_communication','English', 'Allowed communication');
+INSERT INTO txt VALUES ('subzones', 'German', 'Subzonen');
+INSERT INTO txt VALUES ('subzones', 'English', 'Subzones');
+INSERT INTO txt VALUES ('superzone', 'German', 'Superzone');
+INSERT INTO txt VALUES ('superzone', 'English', 'Superzone');
+INSERT INTO txt VALUES ('edit_zone_title', 'German', 'Netzwerkzone editieren');
+INSERT INTO txt VALUES ('edit_zone_title', 'English', 'Edit network zone');
+INSERT INTO txt VALUES ('add_ip_addresses', 'German', 'IP Adresse(n) hinzufügen');
+INSERT INTO txt VALUES ('add_ip_addresses', 'English', 'Add IP Addresses');
+INSERT INTO txt VALUES ('delete_zone_title', 'German', 'Netzwerkzone löschen');
+INSERT INTO txt VALUES ('delete_zone_title', 'English', 'Delete network zone');
+INSERT INTO txt VALUES ('delete_zone_text', 'German', 'Sind Sie sich sicher, dass Sie die Netzwerkzone % löschen wollen?');
+INSERT INTO txt VALUES ('delete_zone_text', 'English', 'Are you sure you want to delete the network zone %?');
+INSERT INTO txt VALUES ('to', 'German', 'Nach');
+INSERT INTO txt VALUES ('to', 'English', 'To');
+INSERT INTO txt VALUES ('allowed_comm_dests', 'German', 'Erlaubte Kommunikation (Nach)');
+INSERT INTO txt VALUES ('allowed_comm_dests', 'English', 'Allowed communication (to)');
+INSERT INTO txt VALUES ('allowed_comm_srcs', 'German', 'Erlaubte Kommunikation (Von)');
+INSERT INTO txt VALUES ('allowed_comm_srcs', 'English', 'Allowed communication (from)');
+INSERT INTO txt VALUES ('relogin', 'German', 'Erneut anmelden');
+INSERT INTO txt VALUES ('relogin', 'English', 'Re-Login');
+INSERT INTO txt VALUES ('relogin_error', 'German', 'Fehler bei der erneuten Anmeldung');
+INSERT INTO txt VALUES ('relogin_error', 'English', 'Re-Login error');
+INSERT INTO txt VALUES ('internet_local_zone', 'German', 'Internet / Lokal');
+INSERT INTO txt VALUES ('internet_local_zone', 'English', 'Internet / Local');
+INSERT INTO txt VALUES ('rule_conform', 'German', 'Regelkonform');
+INSERT INTO txt VALUES ('rule_conform', 'English', 'In accordance with the rules');
+INSERT INTO txt VALUES ('rule_violations', 'German', 'Regelverletzungen');
+INSERT INTO txt VALUES ('rule_violations', 'English', 'Rule violations');
+INSERT INTO txt VALUES ('no_network_zones', 'German', 'Es existieren bisher keine Netzwerkzonen. Bitte legen Sie diese im Abschnitt "Konfiguration" an.');
+INSERT INTO txt VALUES ('no_network_zones', 'English', 'No network zones exist yet. Please create them in the "Configuration" section.');
+
-- text codes (roughly) categorized:
-- U: user texts (explanation or confirmation texts)
@@ -1634,6 +1735,22 @@ INSERT INTO txt VALUES ('owner_import', 'English', 'Owner Import');
-- 7000-7999: Monitoring
-- 8000-8999: Workflow
+-- generic success messages
+INSERT INTO txt VALUES ('S_add_title', 'German', 'Erstellen erfolgreich');
+INSERT INTO txt VALUES ('S_add_title', 'English', 'Creation successful');
+INSERT INTO txt VALUES ('S_add_message', 'German', 'Das Erstellen des Elements wurde erfolgreich abgeschlossen.');
+INSERT INTO txt VALUES ('S_add_message', 'English', 'The creation of the element has been completed successfully.');
+
+INSERT INTO txt VALUES ('S_modify_title', 'German', 'Modifizieren erfolgreich');
+INSERT INTO txt VALUES ('S_modify_title', 'English', 'Modification successful');
+INSERT INTO txt VALUES ('S_modify_message', 'German', 'Das Modifizieren des Elements wurde erfolgreich abgeschlossen.');
+INSERT INTO txt VALUES ('S_modify_message', 'English', 'The modification of the element has been completed successfully.');
+
+INSERT INTO txt VALUES ('S_delete_title', 'German', 'Löschen erfolgreich');
+INSERT INTO txt VALUES ('S_delete_title', 'English', 'Deletion successful');
+INSERT INTO txt VALUES ('S_delete_message', 'German', 'Das Löschen des Elements wurde erfolgreich abgeschlossen.');
+INSERT INTO txt VALUES ('S_delete_message', 'English', 'The deletion of the element has been completed successfully.');
+
-- user messages
INSERT INTO txt VALUES ('U0001', 'German', 'Eingabetext wurde um nicht erlaubte Zeichen gekürzt');
INSERT INTO txt VALUES ('U0001', 'English', 'Input text has been shortened by not allowed characters');
@@ -1754,6 +1871,8 @@ INSERT INTO txt VALUES ('U5318', 'German', 'Sind sie sicher, dass sie die Einst
INSERT INTO txt VALUES ('U5318', 'English', 'Are you sure you want to reset the settings? Changes on workflows get lost.');
INSERT INTO txt VALUES ('U5319', 'German', 'Server für ausgehende Emails zur Benachrichtigung verwalten.');
INSERT INTO txt VALUES ('U5319', 'English', 'Manage email server for outgoing user notifications.');
+INSERT INTO txt VALUES ('U5320', 'German', 'Mehrere Email-Adressen mit Komma trennen');
+INSERT INTO txt VALUES ('U5320', 'English', 'Multiple email addresses can be separated by using commas');
INSERT INTO txt VALUES ('U5401', 'German', 'Passwort geändert.');
INSERT INTO txt VALUES ('U5401', 'English', 'Password changed.');
@@ -1790,7 +1909,7 @@ INSERT INTO txt VALUES ('U7401', 'English', 'View the past autodiscovery message
INSERT INTO txt VALUES ('U7501', 'German', 'Archiv der Nachrichten der täglichen Checks');
INSERT INTO txt VALUES ('U7501', 'English', 'View the past daily check messages');
-INSERT INTO txt VALUES ('U8001', 'German', 'Sind sie sicher, dass sie löschen wollen: ');
+INSERT INTO txt VALUES ('U8001', 'German', 'Sind sie sicher, dass sie Folgendes löschen wollen: ');
INSERT INTO txt VALUES ('U8001', 'English', 'Are you sure you want to delete: ');
INSERT INTO txt VALUES ('U8002', 'German', 'Neue Genehmigung zum Auftrag hinzugefügt.');
INSERT INTO txt VALUES ('U8002', 'English', 'New approval added to task.');
@@ -1798,6 +1917,28 @@ INSERT INTO txt VALUES ('U8003', 'German', 'Sind sie sicher, dass sie abbrechen
INSERT INTO txt VALUES ('U8003', 'English', 'Are you sure you want to cancel? Already Created tasks will be lost.');
+-- generic error messages --
+INSERT INTO txt VALUES ('E_load_title', 'German', 'Fehler beim Laden');
+INSERT INTO txt VALUES ('E_load_title', 'English', 'Loading error');
+INSERT INTO txt VALUES ('E_load_message', 'German', 'Beim Laden der Elemente ist ein unerwarteter Fehler aufgetreten. Bitte melden Sie den Fehler, wenn er nicht erklärbar ist.');
+INSERT INTO txt VALUES ('E_load_message', 'English', 'An unexpected error occurred while loading the items. Please report the error if it cannot be resolved.');
+
+INSERT INTO txt VALUES ('E_add_title', 'German', 'Fehler beim Erstellen');
+INSERT INTO txt VALUES ('E_add_title', 'English', 'Creation error');
+INSERT INTO txt VALUES ('E_add_message', 'German', 'Bei der Erstellung des Elements ist ein unerwarteter Fehler aufgetreten. Bitte melden Sie den Fehler, wenn er nicht erklärbar ist.');
+INSERT INTO txt VALUES ('E_add_message', 'English', 'An unexpected error occurred while creating the item. Please report the error if it cannot be resolved.');
+
+INSERT INTO txt VALUES ('E_modify_title', 'German', 'Fehler beim Modifizieren');
+INSERT INTO txt VALUES ('E_modify_title', 'English', 'Modification error');
+INSERT INTO txt VALUES ('E_modify_message', 'German', 'Bei der Modifizierung des Elements ist ein unerwarteter Fehler aufgetreten. Bitte melden Sie den Fehler, wenn er nicht erklärbar ist.');
+INSERT INTO txt VALUES ('E_modify_message', 'English', 'An unexpected error occurred while modifying the item. Please report the error if it cannot be resolved.');
+
+INSERT INTO txt VALUES ('E_delete_title', 'German', 'Fehler beim Löschen');
+INSERT INTO txt VALUES ('E_delete_title', 'English', 'Deletion error');
+INSERT INTO txt VALUES ('E_delete_message', 'German', 'Bei der Löschung des Elements ist ein unerwarteter Fehler aufgetreten. Bitte melden Sie den Fehler, wenn er nicht erklärbar ist.');
+INSERT INTO txt VALUES ('E_delete_message', 'English', 'An unexpected error occurred while deleting the item. Please report the error if it cannot be resolved.');
+
+
-- error messages
INSERT INTO txt VALUES ('E0001', 'German', 'Nicht klassifizierter Fehler: ');
INSERT INTO txt VALUES ('E0001', 'English', 'Unclassified error: ');
@@ -1838,6 +1979,8 @@ INSERT INTO txt VALUES ('E4002', 'German', 'Keine Regeln für die gewä
INSERT INTO txt VALUES ('E4002', 'English', 'No rules found for given criteria');
INSERT INTO txt VALUES ('E4003', 'German', 'Keine Änderungen für die gewählten Kriterien gefunden');
INSERT INTO txt VALUES ('E4003', 'English', 'No changes found for given criteria');
+INSERT INTO txt VALUES ('E4004', 'German', 'Nutzungsdaten nicht unterstützt für: ');
+INSERT INTO txt VALUES ('E4004', 'English', 'Usage data not supported for: ');
INSERT INTO txt VALUES ('E5101', 'German', 'Löschen des Managements nicht erlaubt, da noch Gateways zugeordnet sind. Diese zuerst löschen wenn möglich');
INSERT INTO txt VALUES ('E5101', 'English', 'Deletion of management not allowed as there are related Gateways. Delete them first if possible');
@@ -2011,8 +2154,8 @@ INSERT INTO txt VALUES ('E5415', 'English', 'Password must contain at least one
INSERT INTO txt VALUES ('E5421', 'German', 'Schlüssel nicht gefunden oder Wert nicht konvertierbar: Wert wird gesetzt auf: ');
INSERT INTO txt VALUES ('E5421', 'English', 'Key not found or could not convert value to int: taking value: ');
-INSERT INTO txt VALUES ('E6001', 'German', 'Der Relogin war nicht erfolgreich. Haben Sie ein falsches Passwort eingegeben? Schauen Sie für Details bitte in die Logs.');
-INSERT INTO txt VALUES ('E6001', 'English', 'Re-login unsuccessful. Did you enter a wrong password? See log for details!');
+INSERT INTO txt VALUES ('E6001', 'German', 'Der Re-Login war nicht erfolgreich. Haben Sie ein falsches Passwort eingegeben? Schauen Sie für Details bitte in die Logs.');
+INSERT INTO txt VALUES ('E6001', 'English', 'Re-login failed. Did you enter a wrong password? See log for details.');
INSERT INTO txt VALUES ('E7001', 'German', 'Aktion wurde bereits durchgeführt');
INSERT INTO txt VALUES ('E7001', 'English', 'Action has already been processed');
@@ -2061,6 +2204,7 @@ INSERT INTO txt VALUES ('E8101', 'English', 'Sending of emails cannot be tested
+
-- errors from Api
INSERT INTO txt VALUES ('A0001', 'German', 'Ungültige Anmeldedaten. Nutzername darf nicht leer sein');
INSERT INTO txt VALUES ('A0001', 'English', 'Invalid credentials. Username must not be empty');
@@ -2134,6 +2278,7 @@ INSERT INTO txt VALUES ('H1001', 'German', 'Die erste Eingabezeile ist die Filt
Nach klicken der "Report erstellen" Schaltfläche werden die Reportdaten im unteren Teil des Fensters dargestellt.
In der Rechten Randleiste werden Details zu den markierten Objekten gezeigt.
Der Report kann in verschiedenen Ausgabeformaten exportiert werden.
+ Mit Hilfe der Tabs am oberen Bildschirmrand kann zwischen der Report-Generierung, dem Report-Scheduling und dem Report-Archiv umgeschaltet werden.
');
INSERT INTO txt VALUES ('H1001', 'English', 'The first input line is the filter line, where the parameters for the report creation are defined.
It is subject to a special Filter Syntax.
@@ -2142,6 +2287,7 @@ INSERT INTO txt VALUES ('H1001', 'English', 'The first input line is the filter
After selecting the "Generate Report" button the Report Data is shown in the lower part of the window.
In the Right Sidebar details about the selected objects are given.
The report can be exported to different output formats.
+ Using the tabs at the top of the screen you may switch between report generation, report scheduling and the report archive.
');
INSERT INTO txt VALUES ('H1101', 'German', '
Alle Filter sind schreibungsunabhängig.
Es gibt verschiedene Varianten für die meisten Schlüsselwörter, z.B. können DestinationPort-Filter geschrieben werden als:
@@ -2171,8 +2317,13 @@ INSERT INTO txt VALUES ('H1102', 'German', 'Folgende Report-Typen stehen zur Au
Regeln (aufgelöst) - Anzeige von Zugriffsregeln, wobei sämtliche Gruppen in Quelle, Ziel und Dienst aufgelöst werden.
Dies ermöglicht einen Export in einer einzigen Tabelle ohne Hilfstabellen, in denen die Objekt-Definitionen stehen. Default-Report-Zeitpunkt: jetzt
Regeln (technisch) - wie der aufgelöste Regel-Report, nur dass Objektnamen nicht angezeigt werden. Default-Report-Zeitpunkt: jetzt
+
Unbenutzte Regeln - Anzeige aller Regeln die das letztemal vor einem vorgegebenen Zeitpunkt benutzt wurden. Geräte, die keine Nutzungsinformation liefern, werden ignoriert.
+ Falls der Reporter auch die Rolle "requester" hat, wird bei Selektion ausgegebener Regeln eine Schaltfläche zur Erzeugung eines Löschantrags angeboten.
NAT-Regeln - Anzeige der NAT-Regeln und nicht der Zugriffsregeln. Default-Report-Zeitpunkt: jetzt
+
Rezertifizierung - Anzeige aller Regeln mit anstehenden Rezertifizierungen. Der Default-Report-Zeitraum kann in den Einstellungen gesetzt werden
Änderungen - Anzeige von Änderungen in einem bestimmten Zeitraum. Default-Report-Zeitraum: dieses Jahr
+
Änderungen (aufgelöst) - Anzeige von Änderungen in einem bestimmten Zeitraum, wobei sämtliche Gruppen in Quelle, Ziel und Dienst aufgelöst werden. Default-Report-Zeitraum: dieses Jahr
+
Änderungen (technisch)- wie der aufgelöste Änderungs-Report, nur dass Objektnamen nicht angezeigt werden. Default-Report-Zeitraum: dieses Jahr
Statistik - Anzeige von Statistikdaten über Anzahl von Objekten und Regeln. Default-Report-Zeitpunkt: jetzt
');
@@ -2181,8 +2332,13 @@ INSERT INTO txt VALUES ('H1102', 'English', 'Choose from the following report t
Rules - display access rules; default report time: now
Rules (resolved) - display access rules but not showing any group structure but only resolved group content. Default report time: now
Rules (technical) - display access rules, resolving groups and not showing object names. Default report time: now<
+
Unused Rules - display all rules where the rule last hit lies before a given time. Devices delivering no usage information are disregarded.
+ If the reporter also has a requester role, a button to create a delete rule request is offered after selecting reported rules.
NAT Rules - display NAT rules instead of access rules. Default report time: now
+
Recertification - display all rules where recertifications are upcoming. Default report interval can be defined in settings
Changes - display all changes in a defined time interval. Default report interval: this year
+
Changes (resolved) - display all changes in a defined time interval but not showing any group structure but only resolved group content. Default report interval: this year
+
Changes (technical) - display all changes in a defined time interval resolving groups and not showing object names. Default report interval: this year
Statistics - display statistical data on the number of objects and rules. Default report time: now
remove: Mögliche Werte: true/false. Wenn "true", werden nur dezertifizierte Regeln gesucht
recertdisplay (recertdisp): Definiert den Zeitraum für die Vorausschau (in Tagen) für die nächste Rezertifizierung. Nur Regeln in diesem Zeitfenster werden gesucht.
lasthit (last-hit, last-used, last-usage, last-use): Filtern nach Regel-Nutzung - aktuell unterstützt für FortiManager und Check Point >=R80.
+
not-used-for-days (unused, unused-days, not-used): nicht genutzt seit der vorgegebenen Anzahl von Tagen oder gar nicht
gateway (gw, firewall, fw, device, dev): Additionally to the specific device selection in the left sidebar
@@ -2214,6 +2371,7 @@ INSERT INTO txt VALUES ('H1111', 'English', '
gateway (gw, firewall, fw, devi
remove: Possible Values: true/false. If "true", only decertified rules are searched
recertdisplay (recertdisp): Defines the lookahead period (in days) for next recertification. Only rules in this time range are searched.
lasthit (last-hit, last-used, last-usage, last-use): filter by rule usage - supported for FortiManager and Check Point >=R80 only.
+
not-used-for-days (unused, unused-days, not-used): not used for the given number of days or never
');
@@ -2276,18 +2434,18 @@ INSERT INTO txt VALUES ('H1214', 'German', 'Regeländerungen des aktuellen
INSERT INTO txt VALUES ('H1214', 'English', 'This year''s Rule Changes: All rule changes performed in the current year in the selected devices.');
INSERT INTO txt VALUES ('H1215', 'German', 'Aktuelle NAT Regeln: Aktuell aktive NAT-Regeln aller ausgewählten Devices.');
INSERT INTO txt VALUES ('H1215', 'English', 'Current NAT Rules: Currently active NAT rules of all selected devices.');
-INSERT INTO txt VALUES ('H1301', 'German', 'Direkt nach der Erzeugung oder vom Archiv aus können Reports in verschiedenen Ausgabeformaten exportiert werden:');
-INSERT INTO txt VALUES ('H1301', 'English', 'Directly after creation or from the archive reports can be exported to different output formats:');
+INSERT INTO txt VALUES ('H1301', 'German', 'Direkt nach der Erzeugung oder vom Archiv aus können Reports in verschiedenen Ausgabeformaten exportiert werden:');
+INSERT INTO txt VALUES ('H1301', 'English', 'Directly after creation or from the archive reports can be exported to different output formats:');
INSERT INTO txt VALUES ('H1302', 'German', '
pdf
html
csv (aktuell nur für die aufgelösten und technischen Report-Typen unterstützt)
json
');
INSERT INTO txt VALUES ('H1302', 'English', '
pdf
html
csv (currently only supported for resolved and technical report types)
json
');
INSERT INTO txt VALUES ('H1303', 'German', 'Nach betätigen des "Report exportieren"-Auswahlfeldes kann eines oder mehrere dieser Formate ausgewählt werden.
Bei Aktivierung der pdf-Ausgabe wird desweiteren das Seitenformat zur Auswahl angeboten.
- Auch kann der Report mit einem Namen versehen und archiviert werden.
+ Auch kann der Report mit einem Namen versehen und archiviert werden.
Ein weiteres Ausgabefenster erlaubt dann das separate Abholen der ausgewählten Ausgabedateien.
');
INSERT INTO txt VALUES ('H1303', 'English', 'After clicking the "Export Report" button one or more of them can be selected.
When selecting the pdf export, the page format is offered for selection.
- Also the possibility to name and save the report in the archive is given.
+ Also the possibility to name and save the report in the archive is given.
Another Popup allows then to download the selected output files separately.
');
INSERT INTO txt VALUES ('H1401', 'German', 'Im unteren Teil der Hauptseite werden die Ausgabedaten des generierten Reports dargestellt.
@@ -2306,6 +2464,23 @@ INSERT INTO txt VALUES ('H1402', 'German', '
Nummer
Name
Que
INSERT INTO txt VALUES ('H1402', 'English', '
Number
Name
Source Zone
Source
Destination Zone
Destination
Services
Action
Logging
Enabled
UID
Comment
');
+INSERT INTO txt VALUES ('H1403', 'German', 'Zusätzlich werden in einzelnen Reporttypen weitere Spalten dargestellt:
+
Recertification Report: Next Recertification Date, Owner, IP address match, Last Hit
+
+');
+
INSERT INTO txt VALUES ('H1501', 'German', 'Hier werden die fixen Kriterien für die Auswahl zur Reporterstellung dargestellt.
Weiteren Kriterien können über die Filterleiste hinzugefügt werden.
');
@@ -2318,11 +2493,13 @@ INSERT INTO txt VALUES ('H1503', 'German', 'Auflistung aller verfügbaren D
Die Ansicht kann für unterschiedliche Nutzer entsprechend der Mandantenzuordnung variieren.
Für eine Reporterstellung muss hier eine Auswahl getroffen werden. Die dargestellten Devices können ein- oder ausgeklappt werden.
Ab welcher Mindestanzahl die Darstellung zu Beginn eingeklappt ist, kann individuell in den Reporting-Einstellungen definiert werden.
+ Im Unbenutzte-Regel-Report werden Devices, die keine Nutzungsinformationen liefern, bei Reporterstellung automatisch deselektiert.
');
INSERT INTO txt VALUES ('H1503', 'English', 'Display of all available devices.
This view may differ for the different users according to the tenant assignments.
For the creation of a report a selection out of them has to be done. The displayed devices can be collapsed or expanded.
In the Report Settings it is possible to define the minimum number, where the display starts collapsed.
+ In the Unused Rules Report devices not delivering usage information are deselected automatically during report creation.
');
INSERT INTO txt VALUES ('H1504', 'German', 'Anzeige der gewählten Reportzeit bzw. des gewählten Reportzeitraums in Abhängigkeit vom gewählten Report-Typ.
Vorgabewerte sind "jetzt" bzw. "dieses Jahr". Über die "Ändern"-Schaltfläche kann dies in einem entsprechenden Popup-Fenster angepasst werden:
@@ -2336,14 +2513,38 @@ INSERT INTO txt VALUES ('H1505', 'German', 'Für Report-Typen, welche die A
INSERT INTO txt VALUES ('H1505', 'English', 'For report types requiring a report time there are two options:
Selecting a particular time with the date/time picker or using the default value "now".
');
-INSERT INTO txt VALUES ('H1506', 'German', 'Für Report-Typen, die Zeitintervalle benötigen, kann gewählt werden zwischen:');
-INSERT INTO txt VALUES ('H1506', 'English', 'For report types requiring a time range a selection can be done between:');
+INSERT INTO txt VALUES ('H1506', 'German', 'Für Report-Typen, die Zeitintervalle benötigen (nicht Unbenutzte-Regel- und Rezertifizierungs-Report), kann gewählt werden zwischen:');
+INSERT INTO txt VALUES ('H1506', 'English', 'For report types requiring a time range (not Unused Rule or Recertification Report) a selection can be done between:');
INSERT INTO txt VALUES ('H1507', 'German', 'Vordefinierte Abkürzungen "dieses Jahr", "letztes Jahr", "dieser Monat", "letzter Monat", "diese Woche", "letzte Woche", "heute" oder "gestern"');
INSERT INTO txt VALUES ('H1507', 'English', 'Predefined shortcuts "this year", "last year", "this month", "last month", "this week", "last week", "today" or "yesterday"');
INSERT INTO txt VALUES ('H1508', 'German', 'Zeitintervalle in Tagen, Wochen, Monaten oder Jahren relativ zum aktuellen Zeitpunkt');
INSERT INTO txt VALUES ('H1508', 'English', 'Time intervals in days, weeks, months or years in relation to the actual time');
INSERT INTO txt VALUES ('H1509', 'German', 'Absolute Start- und Endezeiten. Beide Grenzen können durch setzen der "offen"-Markierung ausser Kraft gesetzt werden.');
INSERT INTO txt VALUES ('H1509', 'English', 'Absolute start and end times. Both limits can be separately omitted by setting the "open" checkbox.');
+INSERT INTO txt VALUES ('H1510', 'German', 'Nur beim Unbenutzte-Regel-Report: Unbenutzt seit: Hier wird die Anzahl von Tagen eingegeben, seitdem die anzuzeigenden Regeln nicht mehr benutzt wurden.
+ Regeln, die noch keine letzte Nutzung protokolliert haben, werden ebenfalls dargestellt, falls sie älter als eine in den Reporting-Einstellungen definierte Toleranzzeit sind.
+ Dort kann auch der Default-Wert für den Zeitraum der Nichtbenutzung gesetzt werden.
+');
+INSERT INTO txt VALUES ('H1510', 'English', 'Only for Unused Rules Report: Unused since: Here the number of days is given, since when the rules to be displayed have not been used.
+ Rules never used are also displayed if their creation date is older than a tolerance interval defined in the Report Settings.
+ There also the default value for the unused interval can be defined.
+');
+INSERT INTO txt VALUES ('H1511', 'German', 'Nur beim Rezertifizierungs-Report: Rezertifizierungsparameter
+
Fällig in: Hier wird festgelegt, wie weit die Suche nach zu rezertifizierenden Regeln gehen soll (in Tagen).
+ Der Default-Wert kann sowohl vom Administrator in den Allgemeinen
+ als auch vom jeweiligen Nutzer in den Persönlichen Rezertifizierungseinstellungen festgelegt werden.
+
Eigentümer: Hier kann aus den dem Nutzer zugeordneten Eigentümerschaften ausgewählt werden.
+
Any-Regeln anzeigen: Wenn das Häkchen gesetzt ist, werden auch Regeln mit IP 0.0.0.0 in Quelle oder Ziel dargestellt.
+ Beim Deselektieren wird ein exkludierender Ausdruck zur Filterzeile hinzugefügt.
+');
+INSERT INTO txt VALUES ('H1511', 'English', 'Only for Recertification Report: Recertification Parameters
+
Due within: Select how far ahead should be searched for rules to be recertified (in days).
+ The default value can be set by the administrator in the General
+ as well as in the Personal Recertification Settings by the user
.
+
Owner: Select the certifying owner out of the ownerships related to the user.
+
Show any rules: If the flag is set, rules with IP 0.0.0.0 in source or destination are shown.
+ When deselecting, an excluding statement is added to the filter line.
+');
INSERT INTO txt VALUES ('H1601', 'German', 'Die rechte Randleiste hat zwei Reiter: Unter "Alle" werden alle aktuell abgeholten Objekte dargestellt,
während unter "Regel" nur die in der Reportausgabe ausgewählten Regeln gezeigt werden.
Folgende Daten werden dargestellt, gruppiert nach den ausgewählten Devices:
@@ -2361,8 +2562,8 @@ INSERT INTO txt VALUES ('H2001', 'German', 'Es können Reports für ein
INSERT INTO txt VALUES ('H2001', 'English', 'Reports can be scheduled for a given time or as recurring tasks.
Every user can administrate his own report schedules.
');
-INSERT INTO txt VALUES ('H2011', 'German', 'Name: Der Reportname, der im Archiv wiederzufinden ist.');
-INSERT INTO txt VALUES ('H2011', 'English', 'Name: The report name to be found in the Archive.');
+INSERT INTO txt VALUES ('H2011', 'German', 'Name: Der Reportname, der im Archiv wiederzufinden ist.');
+INSERT INTO txt VALUES ('H2011', 'English', 'Name: The report name to be found in the Archive.');
INSERT INTO txt VALUES ('H2012', 'German', 'Startdatum und -zeit: Erste Ausführung des Terminauftrags.
Bitte einige Minuten im voraus wählen, wenn die Ausführung noch heute erfolgen soll, da es einen Zeitverzug von einigen Minuten durch den Timer geben kann.
');
@@ -2397,13 +2598,13 @@ INSERT INTO txt VALUES ('H2018', 'English', 'Count: Counts how many reports have
INSERT INTO txt VALUES ('H3001', 'German', 'Hier sind die archivierten Reports mit Name sowie Informationen zu Erzeugungsdatum, Typ, Vorlage (nur bei termingesteuerten Reports),
Eigentümer sowie eine kurze Beschreibung des Inhalts zu finden.
Sie können zum einen durch Export manuell erzeugter Reports durch Setzen des "Archiv"-Kennzeichens in Export Report erzeugt werden.
- Zum anderen finden sich hier auch die durch das Scheduling erzeugten Reports.
+ Zum anderen finden sich hier auch die durch das Scheduling erzeugten Reports.
Die archivierten Reports können von hier heruntergeladen oder gelöscht werden.
');
INSERT INTO txt VALUES ('H3001', 'English', 'Here the archived reports can be found with name and information about creation date, type, template (only at scheduled reports),
owner and a short description about the content.
They may be created on the one hand by exporting manually created reports with setting the flag "Archive" in Export Report.
- On the other hand here also the reports created by the Scheduling can be found.
+ On the other hand here also the reports created by the Scheduling can be found.
It is possible to download or delete these archived reports.
');
@@ -2846,10 +3047,11 @@ INSERT INTO txt VALUES ('H5012', 'English', 'The chapter "Authorization" offers
and Roles, additionally there is an overview of the owners.
');
INSERT INTO txt VALUES ('H5013', 'German', 'Im Kapitel "Voreinstellungen" kann der Administrator Standardeinstellungen vornehmen,
- die für alle Nutzer gelten, sowie die Passworteinstellungen definieren, welche für alle Passwortänderungen gültig sind.
+ die für alle Nutzer gelten, sowie die Email-, Importer- und
+ Passworteinstellungen definieren.
');
INSERT INTO txt VALUES ('H5013', 'English', 'In the "Defaults" chapter the administrator can define Default Values applicable to all users
- and set a Password Policy valid for all password changes.
+ and define email-, importer- and Password Policy settings.
');
INSERT INTO txt VALUES ('H5014', 'German', 'Das Kapitel "Persönlich" ist für alle Nutzer zugänglich. Hier können das individuelle Password,
die bevorzugte Sprache und Reporting-Einstellungen gesetzt werden.
@@ -3331,10 +3533,10 @@ INSERT INTO txt VALUES ('H5411', 'German', 'Standardsprache: Die Sprache, die n
INSERT INTO txt VALUES ('H5411', 'English', 'Default Language: The language which every user gets at first login.
After login each user can define its own preferred language.
');
-INSERT INTO txt VALUES ('H5412', 'German', 'Pro Abruf geholte Elemente: Definiert die (maximale) Anzahl der Objekte, die bei der Reporterzeugung und beim Aufbau der rechten Randleiste in einem Schritt geholt werden.
+INSERT INTO txt VALUES ('H5412', 'German', 'UI - Pro Abruf geholte Elemente: Definiert die (maximale) Anzahl der Objekte, die bei der Reporterzeugung und beim Aufbau der rechten Randleiste in einem Schritt geholt werden.
Dies kann genutzt werden, um die Performanz zu optimieren, wenn nötig.
');
-INSERT INTO txt VALUES ('H5412', 'English', 'Elements per fetch: Defines the (maximum) number of objects which are fetched in one step for the report creation and the build up of the right sidebar.
+INSERT INTO txt VALUES ('H5412', 'English', 'UI - Elements per fetch: Defines the (maximum) number of objects which are fetched in one step for the report creation and the build up of the right sidebar.
This can be used to optimize performance if necessary.
');
INSERT INTO txt VALUES ('H5413', 'German', 'Max initiale Abrufe rechte Randleiste: Definiert die (maximale) Anzahl an Abrufen während der Initialisierung der rechten Randleiste.
@@ -3351,8 +3553,49 @@ INSERT INTO txt VALUES ('H5414', 'English', 'Completely auto-fill right sidebar:
');
INSERT INTO txt VALUES ('H5415', 'German', 'Datenaufbewahrungszeit (in Tagen): Legt fest, wie lange die Daten in der Datenbank gehalten werden (wird noch nicht unterstützt).');
INSERT INTO txt VALUES ('H5415', 'English', 'Data retention time (in days): Defines how long the data is kept in the database (currently not supported).');
-INSERT INTO txt VALUES ('H5416', 'German', 'Importintervall (in Sekunden): Zeitintervall zwischen zwei Imports (wird noch nicht unterstützt)');
-INSERT INTO txt VALUES ('H5416', 'English', 'Import sleep time (in seconds): Time between import loops (currently not supported).');
+INSERT INTO txt VALUES ('H5416', 'German', '
+
+
Importintervall (in Sekunden): Zeitintervall zwischen zwei Import-Läufen. Default-Wert = 40.
+
Zertifikate beim Import prüfen: Sollen bei den API-Calls in Richtung der Firewalls nur gültige Zertifikate akzeptiert werden?
+ Sollte nur auf "aktiv" gesetzt werden, wenn alle Firewalls offiziell signierte Zertifikate besitzen,
+ andernfalls ist ein Import nicht möglich. Default-Wert = "inaktiv".
+
Zertifikatswarnungen unterdrücken: Sollen im Log Warnungen bei selbstsignierten oder ungültigen Zertifikaten auf zu importierenden
+ Firewalls ausgegeben werden? Default-Wert = "inaktiv".
+
FW API - Pro Abruf geholte Elemente: Wie viele Objekte sollen beim Import per Firewall-API Call auf einmal geholt werden? Default-Wert = 150.
+
Änderungsbenachrichtigung via Email
+
+
Änderungsbenachrichtigung aktiv: Sollen Emails bei festgestellten Änderungen versendet werden, ist diese
+ Einstellung zu aktivieren. Default-Wert = "inaktiv".
+
Empfänger-Email-Adressen für Änderungen: Komma-separierte Liste von Email-Adressen, die bei festgestellter
+ sicherheitsrelevanter Änderung auf einem importierten Management benachrichtigt werden. Default-Wert = "leer".
+
Titel der Änderungsbenachrichtigung: Betreffzeile der Benachrichtigungs-Email. Default-Wert = "leer".
+
Text der Änderungsbenachrichtigung: Start des Email-Textes. Die Email enthält stets den Namen und die ID des
+ geänderten Managements sowie die Anzahl der festgestellten Änderungen. Default-Wert = "leer".
+
+
+
+');
+INSERT INTO txt VALUES ('H5416', 'English', '
+
+
Import sleep time (in seconds): Time between import loops; default value=40.
+
Check certificates during import: During API calls towards Firewalls shall only valid certificates be accepted?
+ This should only be set to "active" if all firewall API certificates are valid, otherwise an import will not be possible.
+ Default value = "inactive".
+
Suppress certificate warnings: Shall warnings about invalid certificates be written to import log? Default value = "inactive".
+
FW API - Elements per fetch: How many objects/rules shall be fetched per API call from a firewall management? Default value = 150.
+
Change notification via email:
+
+
Change notification active?: When an import finds security relevant changes, should an email be sent out?
+ Default value = "inactive".
+
Recipient email addresses for change notifications: A comma-separated list of email addresses, which will get information in the case of
+ security relevant changes found during import of a firewall management. Default value = "empty".
+
Subject of change notification emails: Subject line for notification emails. Default value = "empty".
+
Body of change notification emails: Start of the email text. The email will always contain name and ID of the changed
+ firewall management as well as the number of changes. Default value = "empty".
+
+
+
+');
INSERT INTO txt VALUES ('H5417', 'German', 'Rezertifizierungsintervall (in Tagen): Maximale Zeit, nach der eine Regel rezertifiziert werden soll.');
INSERT INTO txt VALUES ('H5417', 'English', 'Recertification Period (in days): Maximum time, after when a rule should be recertified.');
INSERT INTO txt VALUES ('H5418', 'German', 'Rezertifizierungserinnerungsintervall (in Tagen): Zeit vor dem Fälligkeitsdatum, ab der eine Regel als fällig hervorgehoben werden soll.');
@@ -3363,15 +3606,30 @@ INSERT INTO txt VALUES ('H5420', 'German', 'Frist zum Löschen der Regeln (
INSERT INTO txt VALUES ('H5420', 'English', 'Rule Removal Grace Period (in days): Maximum time the fwadmin has to remove the decertified rules.');
INSERT INTO txt VALUES ('H5421', 'German', 'Kommentar Pflichtfeld: Legt fest, dass das Kommentarfeld für Re- und Dezertifizierungen gefüllt sein muss.');
INSERT INTO txt VALUES ('H5421', 'English', 'Comment Required: A non-empty comment for the re- or decertification is required.');
-
+INSERT INTO txt VALUES ('H5422', 'German', 'Devices zu Beginn eingeklappt ab: Legt fest, ab wievielen Devices (Managements + Gateways) diese in der linken Randleiste zunächst eingeklappt dargestellt werden.');
+INSERT INTO txt VALUES ('H5422', 'English', 'Devices collapsed at beginning from: defines from which number of devices (managements + gateways) they are displayed collapsed in the left sidebar at beginning.');
+INSERT INTO txt VALUES ('H5423', 'German', 'Nachrichten-Anzeigedauer (in Sekunden): legt fest, wie lange Erfolgs-Nachrichten dargestellt werden, bis sie automatisch ausgeblendet werden.
+ Fehler-Nachrichten erscheinen dreimal so lange. Beim Wert 0 werden die Nachrichten nicht automatisch ausgeblendet.
+ Die Nutzer-Meldungen können auch danach noch unter UI-Nachrichten eingesehen werden.
+');
+INSERT INTO txt VALUES ('H5423', 'English', 'Message view time (in seconds): defines how long success messages are displayed, until they fade out automatically.
+ Error messages are displayed 3 times as long. Value 0 means that the messages do not fade out.
+ All user messages can still be reviewed at UI Messages.
+');
+INSERT INTO txt VALUES ('H5424', 'German', 'Startzeit täglicher Check: legt die Zeit fest, wann der tägliche Check durchgeführt werden soll.');
+INSERT INTO txt VALUES ('H5424', 'English', 'Daily check start at: defines the time when the daily check should happen.');
+INSERT INTO txt VALUES ('H5426', 'German', 'Autodiscover-Intervall (in Stunden): legt das Intervall fest, in dem die Autodiscovery durchgeführt werden soll.');
+INSERT INTO txt VALUES ('H5426', 'English', 'Auto-discovery sleep time (in hours): defines the interval in which the autodiscovery should be performed.');
+INSERT INTO txt VALUES ('H5427', 'German', 'Autodiscover-Start: legt eine Bezugszeit fest, ab der die Intervalle für die Autodiscovery gerechnet werden.');
+INSERT INTO txt VALUES ('H5427', 'English', 'Auto-discovery start at: defines a referential time from which the autodiscovery intervals are calculated.');
INSERT INTO txt VALUES ('H5428', 'German', 'Rezert Check - aktiv: aktiviere bzw. deaktiviere regelmäßige Prüfungen zur Versendung von Benachrichtigungs- oder Eskalations-Emails an die Eigentümer.');
INSERT INTO txt VALUES ('H5428', 'English', 'Recert Check - active: enable or disable recurring recertification checks to send out notification or escalation emails to owners.');
INSERT INTO txt VALUES ('H5429', 'German', 'Rezert Check alle: Abstand der Prüfungen für den Versand von Benachrichtigungs- oder Eskalations-Emails an die Eigentümer.');
INSERT INTO txt VALUES ('H5429', 'English', 'Recert Check every: Interval between checks for recertification notifications.');
INSERT INTO txt VALUES ('H5430', 'German', 'Rezert Check - Email Titel: Titel der Benachrichtigungs-Email.');
INSERT INTO txt VALUES ('H5430', 'English', 'Recert Check - Email subject: Subject line of the notification email.');
-INSERT INTO txt VALUES ('H5446', 'German', 'Rezert Check - Text anstehend: Textinhalt der Benachrichtigungsmail bei demnächst anstehenden Rezertifizierungen.');
-INSERT INTO txt VALUES ('H5446', 'English', 'Recert Check - text upcoming: Email body of the notification email for upcoming recertifications.');
+INSERT INTO txt VALUES ('H5431', 'German', 'Der Administrator kann Vorgaben für Passwörter definieren, gegen die alle neuen Passwörter aller (internen) Nutzer geprüft werden.');
+INSERT INTO txt VALUES ('H5431', 'English', 'The admin user can define a password policy, against which all new passwords of all (internal) users are checked.');
INSERT INTO txt VALUES ('H5432', 'German', 'Rezert Check - Text überfällig: Textinhalt der Benachrichtigungsmail bei überfälligen Rezertifizierungen (Eskalation).');
INSERT INTO txt VALUES ('H5432', 'English', 'Recert Check - text overdue: Email body of the notification email for overdue recertifications (escalation).');
@@ -3392,30 +3650,6 @@ INSERT INTO txt VALUES ('H5439', 'English', 'Initial state for delete rule ticke
INSERT INTO txt VALUES ('H5440', 'German', 'Neuberechnen offene Rezertifizierungen: Auswahl, wann die Neuberechnung durchgeführt werden soll - beim Hochfahren, täglich via Scheduler oder jetzt (kann mehrere Minuten dauern).');
INSERT INTO txt VALUES ('H5440', 'English', 'Recalculate open recertifications: Choose, when to do this: at startup, daily via scheduler or now (this may take several minutes).');
-INSERT INTO txt VALUES ('H5422', 'German', 'Devices zu Beginn eingeklappt ab: Legt fest, ab wievielen Devices (Managements + Gateways) diese in der linken Randleiste zunächst eingeklappt dargestellt werden.');
-INSERT INTO txt VALUES ('H5422', 'English', 'Devices collapsed at beginning from: defines from which number of devices (managements + gateways) they are displayed collapsed in the left sidebar at beginning.');
-INSERT INTO txt VALUES ('H5423', 'German', 'Nachrichten-Anzeigedauer (in Sekunden): legt fest, wie lange Erfolgs-Nachrichten dargestellt werden, bis sie automatisch ausgeblendet werden.
- Fehler-Nachrichten erscheinen dreimal so lange. Beim Wert 0 werden die Nachrichten nicht automatisch ausgeblendet.
- Die Nutzer-Meldungen können auch danach noch unter UI-Nachrichten eingesehen werden.
-');
-INSERT INTO txt VALUES ('H5423', 'English', 'Message view time (in seconds): defines how long success messages are displayed, until they fade out automatically.
- Error messages are displayed 3 times as long. Value 0 means that the messages do not fade out.
- All user messages can still be reviewed at UI Messages.
-');
-INSERT INTO txt VALUES ('H5424', 'German', 'Startzeit täglicher Check: legt die Zeit fest, wann der tägliche Check durchgeführt werden soll.');
-INSERT INTO txt VALUES ('H5424', 'English', 'Daily check start at: defines the time when the daily check should happen.');
-INSERT INTO txt VALUES ('H5425', 'German', 'FW API - Pro Abruf geholte Elemente: Definiert die (maximale) Anzahl der Objekte, die beim Import über die FWO-API in einem Schritt geholt werden.
- Dies kann genutzt werden, um die Performanz zu optimieren, wenn nötig.
-');
-INSERT INTO txt VALUES ('H5425', 'English', 'FW API - Elements per fetch: Defines the (maximum) number of objects which are fetched in one step during import via the FWO-API.
- This can be used to optimize performance if necessary.
-');
-INSERT INTO txt VALUES ('H5426', 'German', 'Autodiscover-Intervall (in Stunden): legt das Intervall fest, in dem die Autodiscovery durchgeführt werden soll.');
-INSERT INTO txt VALUES ('H5426', 'English', 'Auto-discovery sleep time (in hours): defines the interval in which the autodiscovery should be performed.');
-INSERT INTO txt VALUES ('H5427', 'German', 'Autodiscover-Start: legt eine Bezugszeit fest, ab dem die Intervalle für die Autodiscovery gerechnet werden.');
-INSERT INTO txt VALUES ('H5427', 'English', 'Auto-discovery start at: defines a referential time from which the autodiscovery intervals are calculated.');
-INSERT INTO txt VALUES ('H5431', 'German', 'Der Administrator kann Vorgaben für Passwörter definieren, gegen die alle neuen Passwörter aller (internen) Nutzer geprüft werden.');
-INSERT INTO txt VALUES ('H5431', 'English', 'The admin user can define a password policy, against which all new passwords of all (internal) users are checked.');
INSERT INTO txt VALUES ('H5441', 'German', 'Mindestlänge: Minimale Länge des Passworts');
INSERT INTO txt VALUES ('H5441', 'English', 'Min Length: Minimal length of the password.');
INSERT INTO txt VALUES ('H5442', 'German', 'Grossbuchstaben enthalten: Das Passwort muss mindestens einen Grossbuchstaben enthalten.');
@@ -3426,6 +3660,16 @@ INSERT INTO txt VALUES ('H5444', 'German', 'Ziffern enthalten: Das Passwort mus
INSERT INTO txt VALUES ('H5444', 'English', 'Number Required: There has to be at least one number in the password.');
INSERT INTO txt VALUES ('H5445', 'German', 'Sonderzeichen enthalten: Das Passwort muss mindestens ein Sonderzeichen enthalten. Mögliche Werte: !?(){}=~$%&#*-+.,_');
INSERT INTO txt VALUES ('H5445', 'English', 'Special Characters Required: There has to be at least one special character in the password. Possible values are: !?(){}=~$%&#*-+.,_');
+INSERT INTO txt VALUES ('H5446', 'German', 'Rezert Check - Text anstehend: Textinhalt der Benachrichtigungsmail bei demnächst anstehenden Rezertifizierungen.');
+INSERT INTO txt VALUES ('H5446', 'English', 'Recert Check - text upcoming: Email body of the notification email for upcoming recertifications.');
+INSERT INTO txt VALUES ('H5447', 'German', 'Als unbenutzt gewertet nach (in Tagen): Gibt den Zeitpunkt an, vor dem die letzte Nutzung der Regel für den Unbenutzte-Regel-Report in der Vergangenheit liegen muss.');
+INSERT INTO txt VALUES ('H5447', 'English', 'Regarded as unused from (in days): Defines the point in time, before which the last usage has to be in the past for the Unused Rules Report.');
+INSERT INTO txt VALUES ('H5448', 'German', 'Toleranz ab Erzeugungsdatum (in Tagen): Noch niemals benutzte Regeln werden im Unbenutzte-Regel-Report nur berücksichtigt, wenn sie vor dem durch den hier definierten Toleranzwert festgelegten Zeitpunkt erzeugt wurden.');
+INSERT INTO txt VALUES ('H5448', 'English', 'Tolerance from creation date (in days): Never used rules are only regarded in the Unused Rules Report, if they have been created before the point in time defined by this tolerance value.');
+INSERT INTO txt VALUES ('H5449', 'German', 'Sitzungs-Timeout (in Minuten): Zeit, nach der ein Nutzer automatisch aus der Sitzung ausgeloggt wird.');
+INSERT INTO txt VALUES ('H5449', 'English', 'Session timeout (in minutes): Time after which a user is logged out automatically.');
+INSERT INTO txt VALUES ('H5450', 'German', 'Benachrichtigung vor Sitzungs-Timeout (in Minuten): Intervall vor dem automatischen Logout, in dem eine Warnung ausgegeben wird.');
+INSERT INTO txt VALUES ('H5450', 'English', 'Warning before session timeout (in minutes): Interval before automatic logout when a warning message is displayed.');
INSERT INTO txt VALUES ('H5451', 'German', 'Jeder Nutzer (ausser Demo-Nutzer) kann sein eigenes Passwort ändern.
Bitte das alte Passwort einmal und das neue Passwort zweimal eingeben, um Eingabefehler zu vermeiden.
Das neue Passwort muss sich vom alten unterscheiden und wird gegen die Passworteinstellungen geprüft.
@@ -3434,6 +3678,11 @@ INSERT INTO txt VALUES ('H5451', 'English', 'Every user (except demo user) can c
Please insert the old password once and the new password twice to avoid input mistakes.
The new password has to be different from the old one and is checked against the Password Policy.
');
+INSERT INTO txt VALUES ('H5452', 'German', 'Max erlaubte Importdauer (in Stunden): Obergrenze, welche Importdauer im täglichen Check noch als akzeptabel gewertet wird.');
+INSERT INTO txt VALUES ('H5452', 'English', 'Max allowed import duration (in hours): Upper limit for the accepted import duration in the daily check.');
+INSERT INTO txt VALUES ('H5453', 'German', 'Max erlaubtes Importintervall (in Stunden): Obergrenze, welcher Abstand zwischen zwei Imports im täglichen Check noch akzeptiert wird.');
+INSERT INTO txt VALUES ('H5453', 'English', 'Max allowed import interval (in hours): Upper limit for the accepted interval between two imports in the daily check.');
+
INSERT INTO txt VALUES ('H5461', 'German', 'Jeder Nutzer kann seine eigene bevorzugte Sprache für die Anwendung einstellen.
Alle Texte werden in dieser Sprache dargestellt, soweit verfügbar. Wenn nicht, wird die Standardsprache verwendet. Wenn der Text auch dort nicht verfügbar ist, wird Englisch genutzt.
Die Standardsprache beim ersten Anmelden kann vom Admin für alle Nutzer in den Standardeinstellungen definiert werden.
@@ -3456,7 +3705,27 @@ INSERT INTO txt VALUES ('H5481', 'German', 'Ein Rezertifizierer kann einige per
INSERT INTO txt VALUES ('H5481', 'English', 'A recertifier can overwrite some personal settings for the recertification report.
The default value is set by the admin in the Default Settings.
');
-
+INSERT INTO txt VALUES ('H5491', 'German', 'Firewall Orchestrator kann Benachrichtigungen versenden, z.B. für anstehende Rezertifizierungen oder wenn beim Import
+ Änderungen festgestellt wurden.
+
+
Der Name oder die IP-Adresse des SMTP-Servers für ausgehende Emails wird im Feld "Adresse" eingetragen.
+
Der TCP-Port des SMTP-Servers (meist 25, 587 oder 465, abhängig von der verwendeten Verschlüsselung) wird im "Port"-Feld eingetragen.
+
Anschließend wird die gewünschte Art der Verschlüsselung eingestellt (None=unverschlüsselt / StartTls / Tls)
+
Verlangt der SMTP-Server eine Authentisierung, so sind Email-Nutzer und Email-Nutzer-Passwort in den beiden folgenden Feldern einzutragen. Andernfalls können diese Felder leer gelassen werden.
+
Schließlich kann eine individuelle Absendeadresse im Feld "Email-Absendeadresse" konfiguriert werden.
+
+');
+INSERT INTO txt VALUES ('H5491', 'English', 'Firewall Orchestrator is able to send out notifications, e.g. for upcoming recertifications or when an import found changes in the firewall configuration.
+
+
Enter the name or IP address of your outgoing SMTP server in the field "Address".
+
The TCP port of the SMTP server (usually 25, 587 or 465, depending on the encryption method used) is entered in the "Port" field.
+
Choose the desired encryption type (None=clear-text / StartTls / Tls)
+
If the SMTP server requires authentication, enter Email User name and password in the following two fields. Otherwise leave empty.
+
Finally an individual sender address can be configured using the field "Email sender address".
+
+');
+INSERT INTO txt VALUES ('H5495', 'German', 'Die folgenden Einstellungen wirken sich auf das Import-Modul (python) aus.');
+INSERT INTO txt VALUES ('H5495', 'English', 'The following settings apply to the Import Module (python).');
INSERT INTO txt VALUES ('H5501', 'German', 'Aktionen müssen zuerst in den Einstellungen definiert werden und können dann den jeweiligen Stati zugeordnet werden.
Die Aktion wird dann bei Eintreffen der hier definierten Bedingungen angeboten bzw. ausgeführt.
');
diff --git a/roles/database/files/upgrade/6.5.0.sql b/roles/database/files/upgrade/6.5.0.sql
new file mode 100644
index 000000000..156e57279
--- /dev/null
+++ b/roles/database/files/upgrade/6.5.0.sql
@@ -0,0 +1,53 @@
+--- Compliance Tables ---
+create schema if not exists compliance;
+
+create table if not exists compliance.network_zone
+(
+ id BIGSERIAL PRIMARY KEY,
+ name VARCHAR NOT NULL,
+ description VARCHAR NOT NULL,
+ super_network_zone_id bigint,
+ owner_id bigint
+);
+
+create table if not exists compliance.network_zone_communication
+(
+ from_network_zone_id bigint NOT NULL,
+ to_network_zone_id bigint NOT NULL
+);
+
+create table if not exists compliance.ip_range
+(
+ network_zone_id bigint NOT NULL,
+ ip_range_start inet NOT NULL,
+ ip_range_end inet NOT NULL,
+ PRIMARY KEY(network_zone_id, ip_range_start, ip_range_end)
+);
+
+
+--- Compliance Foreign Keys ---
+
+--- compliance.ip_range ---
+ALTER TABLE compliance.ip_range DROP CONSTRAINT IF EXISTS compliance_ip_range_network_zone_foreign_key;
+ALTER TABLE compliance.ip_range ADD CONSTRAINT compliance_ip_range_network_zone_foreign_key FOREIGN KEY (network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE;
+
+--- compliance.network_zone ---
+ALTER TABLE compliance.network_zone DROP CONSTRAINT IF EXISTS compliance_super_zone_foreign_key;
+ALTER TABLE compliance.network_zone ADD CONSTRAINT compliance_super_zone_foreign_key FOREIGN KEY (super_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE;
+
+--- compliance.network_zone_communication ---
+ALTER TABLE compliance.network_zone_communication DROP CONSTRAINT IF EXISTS compliance_from_network_zone_communication_foreign_key;
+ALTER TABLE compliance.network_zone_communication DROP CONSTRAINT IF EXISTS compliance_to_network_zone_communication_foreign_key;
+ALTER TABLE compliance.network_zone_communication ADD CONSTRAINT compliance_from_network_zone_communication_foreign_key FOREIGN KEY (from_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE;
+ALTER TABLE compliance.network_zone_communication ADD CONSTRAINT compliance_to_network_zone_communication_foreign_key FOREIGN KEY (to_network_zone_id) REFERENCES compliance.network_zone(id) ON UPDATE RESTRICT ON DELETE CASCADE;
+
+
+--- Compliance Constraints ---
+CREATE EXTENSION IF NOT EXISTS btree_gist;
+--- prevent overlapping ip address ranges in the same zone
+ALTER TABLE compliance.ip_range DROP CONSTRAINT IF EXISTS exclude_overlapping_ip_ranges;
+ALTER TABLE compliance.ip_range ADD CONSTRAINT exclude_overlapping_ip_ranges
+EXCLUDE USING gist (
+ network_zone_id WITH =,
+ numrange(ip_range_start - '0.0.0.0'::inet, ip_range_end - '0.0.0.0'::inet, '[]') WITH &&
+);
diff --git a/roles/database/files/upgrade/6.5.1.sql b/roles/database/files/upgrade/6.5.1.sql
new file mode 100644
index 000000000..a36e914c7
--- /dev/null
+++ b/roles/database/files/upgrade/6.5.1.sql
@@ -0,0 +1,2 @@
+insert into config (config_key, config_value, config_user) VALUES ('unusedTolerance', '400', 0) ON CONFLICT DO NOTHING;
+insert into config (config_key, config_value, config_user) VALUES ('creationTolerance', '90', 0) ON CONFLICT DO NOTHING;
diff --git a/roles/database/tasks/install-database.yml b/roles/database/tasks/install-database.yml
index 261957446..54feb32d4 100644
--- a/roles/database/tasks/install-database.yml
+++ b/roles/database/tasks/install-database.yml
@@ -85,7 +85,7 @@
- name: include table creation pre ansible 2.10
include_tasks: install-db-base-ansible-pre2.10.yml
when: ansible_version.full is version('2.10', '<')
-
+
- name: create db users with group memberships
import_tasks: create-users.yml
when: installation_mode == "new"
diff --git a/roles/database/tasks/main.yml b/roles/database/tasks/main.yml
index dbed28527..34af40108 100644
--- a/roles/database/tasks/main.yml
+++ b/roles/database/tasks/main.yml
@@ -147,6 +147,7 @@
loop:
- csv
- sql
+ tags: [ 'test' ]
- name: create tablespace directory
file:
diff --git a/roles/database/tasks/upgrade_database_new.yml b/roles/database/tasks/upgrade_database_new.yml
index 605ba5689..a34bd5708 100644
--- a/roles/database/tasks/upgrade_database_new.yml
+++ b/roles/database/tasks/upgrade_database_new.yml
@@ -4,8 +4,6 @@
db: "{{ fworch_db_name }}"
path_to_script: "{{ database_install_dir }}/upgrade/{{ item }}.sql"
as_single_query: "{{ postgresql_query_as_single_query }}"
- # register: res
- # when: not (res.changed|d(false))
loop: "{{ upgrade_files | sort }}"
become: true
ignore_errors: false
diff --git a/roles/database/tasks/upgrade_database_old.yml b/roles/database/tasks/upgrade_database_old.yml
index d6a46e5d4..d7586b00a 100644
--- a/roles/database/tasks/upgrade_database_old.yml
+++ b/roles/database/tasks/upgrade_database_old.yml
@@ -3,8 +3,6 @@
postgresql_query:
db: "{{ fworch_db_name }}"
path_to_script: "{{ database_install_dir }}/upgrade/{{ item }}.sql"
- # register: res
- # when: not (res.changed|d(false))
loop: "{{ upgrade_files | sort }}"
ignore_errors: false
become: true
diff --git a/roles/importer/files/importer/checkpointR8x/cp_const.py b/roles/importer/files/importer/checkpointR8x/cp_const.py
index e44efd6e3..7c38cccc6 100644
--- a/roles/importer/files/importer/checkpointR8x/cp_const.py
+++ b/roles/importer/files/importer/checkpointR8x/cp_const.py
@@ -28,7 +28,7 @@
api_obj_types = nw_obj_table_names + svc_obj_table_names # all obj table names to look at during import
cp_specific_object_types = [ # used for fetching enrichment data via "get object" separately (no specific API call)
- 'simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiVsxClusterNetobj', 'CpmiVsxClusterMember',
+ 'simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiVsxClusterNetobj', 'CpmiVsxClusterMember', 'CpmiVsNetobj',
'CpmiAnyObject', 'CpmiClusterMember', 'CpmiGatewayPlain', 'CpmiHostCkp', 'CpmiGatewayCluster', 'checkpoint-host',
'cluster-member'
]
diff --git a/roles/importer/files/importer/checkpointR8x/cp_enrich.py b/roles/importer/files/importer/checkpointR8x/cp_enrich.py
new file mode 100644
index 000000000..f54e21ba7
--- /dev/null
+++ b/roles/importer/files/importer/checkpointR8x/cp_enrich.py
@@ -0,0 +1,168 @@
+import sys
+from common import importer_base_dir
+from fwo_log import getFwoLogger
+sys.path.append(importer_base_dir + '/checkpointR8x')
+import time
+import cp_getter
+import fwo_globals
+import cp_const
+import cp_network
+
+
+################# enrich #######################
+def enrich_config (config, mgm_details, limit=150, details_level=cp_const.details_level, noapi=False, sid=None):
+
+ logger = getFwoLogger()
+ base_url = 'https://' + mgm_details['hostname'] + ':' + str(mgm_details['port']) + '/web_api/'
+ nw_objs_from_obj_tables = []
+ svc_objs_from_obj_tables = []
+ starttime = int(time.time())
+
+ # do nothing for empty configs
+ if config == {}:
+ return 0
+
+ #################################################################################
+ # get object data which is only contained as uid in config by making additional api calls
+ # get all object uids (together with type) from all rules in fields src, dst, svc
+ nw_uids_from_rulebase = []
+ svc_uids_from_rulebase = []
+
+ for rulebase in config['rulebases'] + config['nat_rulebases']:
+ if fwo_globals.debug_level>5:
+ if 'layername' in rulebase:
+ logger.debug ( "Searching for all uids in rulebase: " + rulebase['layername'] )
+ cp_getter.collect_uids_from_rulebase(rulebase, nw_uids_from_rulebase, svc_uids_from_rulebase, "top_level")
+
+ # remove duplicates from uid lists
+ nw_uids_from_rulebase = list(set(nw_uids_from_rulebase))
+ svc_uids_from_rulebase = list(set(svc_uids_from_rulebase))
+
+ # get all uids in objects tables
+ for obj_table in config['object_tables']:
+ nw_objs_from_obj_tables.extend(cp_getter.get_all_uids_of_a_type(obj_table, cp_const.nw_obj_table_names))
+ svc_objs_from_obj_tables.extend(cp_getter.get_all_uids_of_a_type(obj_table, cp_const.svc_obj_table_names))
+
+ # identify all objects (by type) that are missing in objects tables but present in rulebase
+ missing_nw_object_uids = cp_getter.get_broken_object_uids(nw_objs_from_obj_tables, nw_uids_from_rulebase)
+ missing_svc_object_uids = cp_getter.get_broken_object_uids(svc_objs_from_obj_tables, svc_uids_from_rulebase)
+
+ # adding the uid of the Original object for natting:
+ missing_nw_object_uids.append(cp_const.original_obj_uid)
+ missing_svc_object_uids.append(cp_const.original_obj_uid)
+
+ if fwo_globals.debug_level>4:
+ logger.debug ( "found missing nw objects: '" + ",".join(missing_nw_object_uids) + "'" )
+ logger.debug ( "found missing svc objects: '" + ",".join(missing_svc_object_uids) + "'" )
+
+ if noapi == False:
+ # if sid is None:
+    # TODO: why is the re-generation of a new sid necessary here?
+ # if mgm_details['domainUid'] != None:
+ # api_domain = mgm_details['domainUid']
+ # else:
+ # api_domain = mgm_details['configPath']
+
+ # sid = cp_getter.login(mgm_details['import_credential']['user'],mgm_details['import_credential']['secret'],mgm_details['hostname'],mgm_details['port'],api_domain)
+ # logger.debug ( "re-logged into api" )
+
+ # if an object is not there:
+ # make api call: show object details-level full uid "" and add object to respective json
+ for missing_obj in missing_nw_object_uids:
+ show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj}
+ logger.debug ( "fetching obj with uid: " + missing_obj)
+ obj = cp_getter.cp_api_call(base_url, 'show-object', show_params_host, sid)
+ if 'object' in obj:
+ obj = obj['object']
+ if (obj['type'] == 'CpmiAnyObject'):
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': 'any nw object checkpoint (hard coded)',
+ 'type': 'CpmiAnyObject', 'ipv4-address': '0.0.0.0/0',
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ elif (obj['type'] == 'simple-gateway' or obj['type'] == 'CpmiGatewayPlain' or obj['type'] == 'interop'):
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cp_network.get_ip_of_obj(obj),
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ elif obj['type'] == 'multicast-address-range':
+ logger.debug("found multicast-address-range: " + obj['name'] + " (uid:" + obj['uid']+ ")")
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cp_network.get_ip_of_obj(obj),
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ elif (obj['type'] == 'CpmiVsClusterMember' or obj['type'] == 'CpmiVsxClusterMember'):
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cp_network.get_ip_of_obj(obj),
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ logger.debug ('missing obj: ' + obj['name'] + obj['type'])
+ elif (obj['type'] == 'Global'):
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': obj['comments'], 'type': 'host', 'ipv4-address': '0.0.0.0/0',
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ logger.debug ('missing obj: ' + obj['name'] + obj['type'])
+ elif (obj['type'] == 'updatable-object'):
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': obj['comments'], 'type': 'group' #, 'ipv4-address': '0.0.0.0/0',
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ logger.debug ('missing obj: ' + obj['name'] + obj['type'])
+ elif (obj['type'] == 'Internet'):
+ json_obj = {"object_type": "hosts", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': obj['comments'], 'type': 'network', 'ipv4-address': '0.0.0.0/0',
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ elif (obj['type'] == 'access-role'):
+                    pass # ignoring user objects
+ else:
+ logger.warning ( "missing nw obj of unexpected type '" + obj['type'] + "': " + missing_obj )
+ logger.debug ( "missing nw obj: " + missing_obj + " added" )
+ else:
+ logger.warning("could not get the missing object with uid=" + missing_obj + " from CP API")
+
+ for missing_obj in missing_svc_object_uids:
+ show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj}
+ obj = cp_getter.cp_api_call(base_url, 'show-object', show_params_host, sid)
+ if 'object' in obj:
+ obj = obj['object']
+ if (obj['type'] == 'CpmiAnyObject'):
+ json_obj = {"object_type": "services-other", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': 'any svc object checkpoint (hard coded)',
+ 'type': 'service-other', 'ip-protocol': '0'
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ elif (obj['type'] == 'Global'):
+ json_obj = {"object_type": "services-other", "object_chunks": [ {
+ "objects": [ {
+ 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
+ 'comments': 'Original svc object checkpoint (hard coded)',
+ 'type': 'service-other', 'ip-protocol': '0'
+ } ] } ] }
+ config['object_tables'].append(json_obj)
+ else:
+ logger.warning ( "missing svc obj (uid=" + missing_obj + ") of unexpected type \"" + obj['type'] +"\"" )
+ logger.debug ( "missing svc obj: " + missing_obj + " added")
+
+ # logout_result = cp_getter.cp_api_call(base_url, 'logout', {}, sid)
+
+ logger.debug ( "checkpointR8x/enrich_config - duration: " + str(int(time.time()) - starttime) + "s" )
+
+ return 0
diff --git a/roles/importer/files/importer/checkpointR8x/getter.py b/roles/importer/files/importer/checkpointR8x/cp_getter.py
similarity index 89%
rename from roles/importer/files/importer/checkpointR8x/getter.py
rename to roles/importer/files/importer/checkpointR8x/cp_getter.py
index 1a9afa8e8..6c455c12e 100644
--- a/roles/importer/files/importer/checkpointR8x/getter.py
+++ b/roles/importer/files/importer/checkpointR8x/cp_getter.py
@@ -193,7 +193,10 @@ def collect_uids_from_rulebase(rulebase, nw_uids_found, svc_uids_found, debug_te
chunk_name = 'nat_rule_chunks'
else:
for rule in rulebase:
- collect_uids_from_rule(rule, nw_uids_found, svc_uids_found)
+ if 'rulebase' in rule:
+ collect_uids_from_rulebase(rule['rulebase'], nw_uids_found, svc_uids_found, debug_text + '.')
+ else:
+ collect_uids_from_rule(rule, nw_uids_found, svc_uids_found)
return
for layer_chunk in rulebase[chunk_name]:
if 'rulebase' in layer_chunk:
@@ -239,37 +242,21 @@ def get_broken_object_uids(all_uids_from_obj_tables, all_uids_from_rules):
return list(set(broken_uids))
-def get_inline_layer_names_from_rulebase(rulebase, inline_layers):
- logger = getFwoLogger()
- if 'layerchunks' in rulebase:
- for chunk in rulebase['layerchunks']:
- if 'rulebase' in chunk:
- for rules_chunk in chunk['rulebase']:
- get_inline_layer_names_from_rulebase(rules_chunk, inline_layers)
- else:
- if 'rulebase' in rulebase:
- # add section header, but only if it does not exist yet (can happen by chunking a section)
- for rule in rulebase['rulebase']:
- if 'inline-layer' in rule:
- inline_layers.append(rule['inline-layer']['name'])
- if 'name' in rule and rule['name'] == "Placeholder for domain rules":
- logger.debug ("getter - found domain rules reference with uid " + rule["uid"])
-
- if 'rule-number' in rulebase: # not a rulebase but a single rule
- if 'inline-layer' in rulebase:
- inline_layers.append(rulebase['inline-layer']['name'])
- # get_inline_layer_names_from_rulebase(rulebase, inline_layers)
-
-
-def get_layer_from_api_as_dict (api_v_url, sid, show_params_rules, layername):
+def get_layer_from_api_as_dict (api_v_url, sid, show_params_rules, layername, access_type='access', collection_type='rulebase'):
+ # access_type: access / nat
+ # collection_type: rulebase / layer
logger = getFwoLogger()
current_layer_json = { "layername": layername, "layerchunks": [] }
current=0
total=current+1
while (current6:
- logger.debug ( "get_layer_from_api_as_dict current offset: "+ str(current) )
+
+ #################################################################################
+ # adding inline and domain layers (if they exist)
+ add_inline_layers (current_layer_json, api_v_url, sid, show_params_rules)
+
return current_layer_json
-def get_nat_rules_from_api_as_dict (api_host, api_port, api_v_url, sid, show_params_rules):
+def add_inline_layers (rulebase, api_v_url, sid, show_params_rules, access_type='access', collection_type='layer'):
+
+ if 'layerchunks' in rulebase:
+ for chunk in rulebase['layerchunks']:
+ if 'rulebase' in chunk:
+ for rules_chunk in chunk['rulebase']:
+ add_inline_layers(rules_chunk, api_v_url, sid, show_params_rules)
+ else:
+ if 'rulebase' in rulebase:
+ rulebase_idx = 0
+ for rule in rulebase['rulebase']:
+ if 'inline-layer' in rule:
+ inline_layer_name = rule['inline-layer']['name']
+ if fwo_globals.debug_level>5:
+ logger.debug ( "found inline layer " + inline_layer_name )
+ inline_layer = get_layer_from_api_as_dict (api_v_url, sid, show_params_rules, inline_layer_name, access_type=access_type, collection_type=collection_type)
+ rulebase['rulebase'][rulebase_idx+1:rulebase_idx+1] = inline_layer['layerchunks'] #### insert inline layer here
+ rulebase_idx += len(inline_layer['layerchunks'])
+
+ if 'name' in rule and rule['name'] == "Placeholder for domain rules":
+ logger.debug ("getter - found domain rules reference with uid " + rule["uid"])
+ rulebase_idx += 1
+
+
+def get_nat_rules_from_api_as_dict (api_v_url, sid, show_params_rules):
logger = getFwoLogger()
nat_rules = { "nat_rule_chunks": [] }
current=0
diff --git a/roles/importer/files/importer/checkpointR8x/parse_network.py b/roles/importer/files/importer/checkpointR8x/cp_network.py
similarity index 70%
rename from roles/importer/files/importer/checkpointR8x/parse_network.py
rename to roles/importer/files/importer/checkpointR8x/cp_network.py
index 721930aef..be58428ba 100644
--- a/roles/importer/files/importer/checkpointR8x/parse_network.py
+++ b/roles/importer/files/importer/checkpointR8x/cp_network.py
@@ -1,11 +1,12 @@
from fwo_log import getFwoLogger
import json
import cp_const
-from cpcommon import get_ip_of_obj
from fwo_const import list_delimiter
+import fwo_alert, fwo_api
+import ipaddress
-def parse_network_objects_to_json(full_config, config2import, import_id, mgm_id=0, debug_level=0):
+def normalize_network_objects(full_config, config2import, import_id, mgm_id=0, debug_level=0):
nw_objects = []
for obj_table in full_config['object_tables']:
@@ -114,3 +115,44 @@ def add_member_names_for_nw_group(idx, nw_objects):
member_names += member_name + list_delimiter
group['obj_member_names'] = member_names[:-1]
nw_objects.insert(idx, group)
+
+
+def validate_ip_address(address):
+ try:
+ # ipaddress.ip_address(address)
+ ipaddress.ip_network(address)
+ return True
+ # print("IP address {} is valid. The object returned is {}".format(address, ip))
+ except ValueError:
+ return False
+ # print("IP address {} is not valid".format(address))
+
+
+def get_ip_of_obj(obj, mgm_id=None):
+ if 'ipv4-address' in obj:
+ ip_addr = obj['ipv4-address']
+ elif 'ipv6-address' in obj:
+ ip_addr = obj['ipv6-address']
+ elif 'subnet4' in obj:
+ ip_addr = obj['subnet4'] + '/' + str(obj['mask-length4'])
+ elif 'subnet6' in obj:
+ ip_addr = obj['subnet6'] + '/' + str(obj['mask-length6'])
+ elif 'ipv4-address-first' in obj and 'ipv4-address-last' in obj:
+ ip_addr = obj['ipv4-address-first'] + '-' + str(obj['ipv4-address-last'])
+ elif 'ipv6-address-first' in obj and 'ipv6-address-last' in obj:
+ ip_addr = obj['ipv6-address-first'] + '-' + str(obj['ipv6-address-last'])
+ else:
+ ip_addr = None
+
+ ## fix malformed ip addresses (should not regularly occur and constitutes a data issue in CP database)
+ if ip_addr is None or ('type' in obj and (obj['type'] == 'address-range' or obj['type'] == 'multicast-address-range')):
+ pass # ignore None and ranges here
+ elif not validate_ip_address(ip_addr):
+ alerter = fwo_alert.getFwoAlerter()
+ alert_description = "object is not a valid ip address (" + str(ip_addr) + ")"
+ fwo_api.create_data_issue(alerter['fwo_api_base_url'], alerter['jwt'], severity=2, obj_name=obj['name'], object_type=obj['type'], description=alert_description, mgm_id=mgm_id)
+ alert_description = "object '" + obj['name'] + "' (type=" + obj['type'] + ") is not a valid ip address (" + str(ip_addr) + ")"
+ fwo_api.setAlert(alerter['fwo_api_base_url'], alerter['jwt'], title="import error", severity=2, role='importer', \
+ description=alert_description, source='import', alertCode=17, mgm_id=mgm_id)
+ ip_addr = '0.0.0.0/32' # setting syntactically correct dummy ip
+ return ip_addr
diff --git a/roles/importer/files/importer/checkpointR8x/parse_rule.py b/roles/importer/files/importer/checkpointR8x/cp_rule.py
similarity index 68%
rename from roles/importer/files/importer/checkpointR8x/parse_rule.py
rename to roles/importer/files/importer/checkpointR8x/cp_rule.py
index b674cc437..b52664a1b 100644
--- a/roles/importer/files/importer/checkpointR8x/parse_rule.py
+++ b/roles/importer/files/importer/checkpointR8x/cp_rule.py
@@ -1,60 +1,52 @@
from asyncio.log import logger
from fwo_log import getFwoLogger
import json
-import cp_const, cpcommon
+import cp_const
import fwo_const
-from fwo_const import list_delimiter
+import fwo_globals
+from fwo_const import list_delimiter, default_section_header_text
from fwo_base import sanitize
from fwo_exception import ImportRecursionLimitReached
+uid_to_name_map = {}
-def add_section_header_rule_in_json(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid):
- section_header_uids.append(sanitize(rule_uid))
- rule = {
- "control_id": int(import_id),
- "rule_num": int(rule_num),
- "rulebase_name": sanitize(layer_name),
- # rule_ruleid
- "rule_disabled": False,
- "rule_src_neg": False,
- "rule_src": "Any",
- "rule_src_refs": sanitize(cp_const.any_obj_uid),
- "rule_dst_neg": False,
- "rule_dst": "Any",
- "rule_dst_refs": sanitize(cp_const.any_obj_uid),
- "rule_svc_neg": False,
- "rule_svc": "Any",
- "rule_svc_refs": sanitize(cp_const.any_obj_uid),
- "rule_action": "Accept",
- "rule_track": "Log",
- "rule_installon": "Policy Targets",
- "rule_time": "Any",
- "rule_implied": False,
- # "rule_comment": None,
- # rule_name
- "rule_uid": sanitize(rule_uid),
- "rule_head_text": sanitize(section_name),
- # rule_from_zone
- # rule_to_zone
- # rule_last_change_admin
- "parent_rule_uid": sanitize(parent_uid)
- }
- rulebase.append(rule)
-
-
-def add_domain_rule_header_rule_in_json(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid):
- add_section_header_rule_in_json(rulebase, section_name, layer_name,
- import_id, rule_uid, rule_num, section_header_uids, parent_uid)
+def normalize_rulebases_top_level (full_config, current_import_id, config2import):
+ logger = getFwoLogger()
+ target_rulebase = []
+ rule_num = 0
+ parent_uid=""
+ section_header_uids=[]
-def resolve_uid_to_name(nw, config2import):
+ # fill uid_to_name_map:
for nw_obj in config2import['network_objects']:
- if nw_obj['obj_uid']==nw:
- return nw_obj['obj_name']
- return nw
+ uid_to_name_map[nw_obj['obj_uid']] = nw_obj['obj_name']
+
+ rb_range = range(len(full_config['rulebases']))
+ for rb_id in rb_range:
+ # if current_layer_name == args.rulebase:
+ if fwo_globals.debug_level>3:
+ logger.debug("parsing layer " + full_config['rulebases'][rb_id]['layername'])
+
+ # parse access rules
+ rule_num = parse_rulebase(
+ full_config['rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'],
+ current_import_id, rule_num, section_header_uids, parent_uid, config2import)
+ # now parse the nat rulebase
+
+ # parse nat rules
+ if len(full_config['nat_rulebases'])>0:
+ if len(full_config['nat_rulebases']) != len(rb_range):
+ logger.warning('get_config - found ' + str(len(full_config['nat_rulebases'])) +
+ ' nat rulebases and ' + str(len(rb_range)) + ' access rulebases')
+ else:
+ rule_num = parse_nat_rulebase(
+ full_config['nat_rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'],
+ current_import_id, rule_num, section_header_uids, parent_uid, config2import)
+ return target_rulebase
-def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_num, parent_uid, config2import, debug_level=0):
+def parse_single_rule(src_rule, rulebase, layer_name, import_id, rule_num, parent_uid, config2import, debug_level=0):
logger = getFwoLogger()
# reference to domain rule layer, filling up basic fields
if 'type' in src_rule and src_rule['type'] != 'place-holder':
@@ -75,8 +67,11 @@ def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_nu
src['networks'] + list_delimiter
else: # more than one source
for nw in src['networks']:
- nw_resolved = resolve_uid_to_name(nw, config2import)
- rule_src_name += src["name"] + '@' + nw_resolved + list_delimiter
+ nw_resolved = resolve_uid_to_name(nw)
+ if nw_resolved == "":
+ rule_src_name += src["name"] + list_delimiter
+ else:
+ rule_src_name += src["name"] + '@' + nw_resolved + list_delimiter
else: # standard network objects as source
rule_src_name += src["name"] + list_delimiter
else:
@@ -234,7 +229,6 @@ def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_nu
"rule_track": sanitize(src_rule['track']['type']['name']),
"rule_installon": sanitize(src_rule['install-on'][0]['name']),
"rule_time": sanitize(src_rule['time'][0]['name']),
- "rule_comment": sanitize(comments),
"rule_name": sanitize(rule_name),
"rule_uid": sanitize(src_rule['uid']),
"rule_implied": False,
@@ -246,68 +240,127 @@ def parse_single_rule_to_json(src_rule, rulebase, layer_name, import_id, rule_nu
"parent_rule_uid": sanitize(parent_rule_uid),
"last_hit": sanitize(last_hit)
}
+ if comments is not None:
+ rule['rule_comment'] = sanitize(comments)
rulebase.append(rule)
+ return rule_num + 1
+ return rule_num
-def parse_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1):
+def resolve_uid_to_name(nw_obj_uid):
+ if nw_obj_uid in uid_to_name_map:
+ return uid_to_name_map[nw_obj_uid]
+ else:
+ logger = getFwoLogger()
+ logger.warning("could not resolve network object with uid " + nw_obj_uid)
+ return ""
+
+
+def insert_section_header_rule(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid):
+ section_header_uids.append(sanitize(rule_uid))
+ rule = {
+ "control_id": int(import_id),
+ "rule_num": int(rule_num),
+ "rulebase_name": sanitize(layer_name),
+ # rule_ruleid
+ "rule_disabled": False,
+ "rule_src_neg": False,
+ "rule_src": "Any",
+ "rule_src_refs": sanitize(cp_const.any_obj_uid),
+ "rule_dst_neg": False,
+ "rule_dst": "Any",
+ "rule_dst_refs": sanitize(cp_const.any_obj_uid),
+ "rule_svc_neg": False,
+ "rule_svc": "Any",
+ "rule_svc_refs": sanitize(cp_const.any_obj_uid),
+ "rule_action": "Accept",
+ "rule_track": "Log",
+ "rule_installon": "Policy Targets",
+ "rule_time": "Any",
+ "rule_implied": False,
+ # "rule_comment": None,
+ # rule_name
+ "rule_uid": sanitize(rule_uid),
+ "rule_head_text": sanitize(section_name),
+ # rule_from_zone
+ # rule_to_zone
+ # rule_last_change_admin
+ "parent_rule_uid": sanitize(parent_uid)
+ }
+ rulebase.append(rule)
+ return rule_num + 1
+
+
+def add_domain_rule_header_rule(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid):
+ return insert_section_header_rule(rulebase, section_name, layer_name,
+ import_id, rule_uid, rule_num, section_header_uids, parent_uid)
+
+
+def check_and_add_section_header(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1):
+ # if current rulebase starts a new section, add section header, but only if it does not exist yet (can happen by chunking a section)
+ if 'type' in src_rulebase and src_rulebase['type'] == 'access-section' and 'uid' in src_rulebase: # and not src_rulebase['uid'] in section_header_uids:
+ section_name = default_section_header_text
+ if 'name' in src_rulebase:
+ section_name = src_rulebase['name']
+ if 'parent_rule_uid' in src_rulebase:
+ parent_uid = src_rulebase['parent_rule_uid']
+ else:
+ parent_uid = ""
+ rule_num = insert_section_header_rule(target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid)
+ parent_uid = src_rulebase['uid']
+ return rule_num
- if (recursion_level > fwo_const.max_recursion_level):
- raise ImportRecursionLimitReached(
- "parse_rulebase_json") from None
+def parse_rulebase(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import,
+ debug_level=0, recursion_level=1, layer_disabled=False):
logger = getFwoLogger()
- if 'layerchunks' in src_rulebase:
+ if (recursion_level > fwo_const.max_recursion_level):
+ raise ImportRecursionLimitReached("parse_rulebase") from None
+
+ # parse chunks
+ if 'layerchunks' in src_rulebase: # found chunks of layers which need to be parsed separately
for chunk in src_rulebase['layerchunks']:
if 'rulebase' in chunk:
for rules_chunk in chunk['rulebase']:
- rule_num = parse_rulebase_json(rules_chunk, target_rulebase, layer_name, import_id, rule_num,
- section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
+ rule_num = parse_rulebase(rules_chunk, target_rulebase, layer_name, import_id, rule_num,
+ section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
else:
- logger.warning("found no rulebase in chunk:\n" +
- json.dumps(chunk, indent=2))
- else:
- if 'rulebase' in src_rulebase:
- # add section header, but only if it does not exist yet (can happen by chunking a section)
- if src_rulebase['type'] == 'access-section' and not src_rulebase['uid'] in section_header_uids:
- section_name = "section without name"
- if 'name' in src_rulebase:
- section_name = src_rulebase['name']
- if 'parent_rule_uid' in src_rulebase:
- parent_uid = src_rulebase['parent_rule_uid']
- else:
- parent_uid = ""
- add_section_header_rule_in_json(target_rulebase, section_name, layer_name,
- import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid)
- rule_num += 1
- parent_uid = src_rulebase['uid']
- for rule in src_rulebase['rulebase']:
+ rule_num = parse_rulebase(chunk, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
+
+ check_and_add_section_header(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
+
+ # parse layered rulebase
+ if 'rulebase' in src_rulebase:
+ # layer_disabled = not src_rulebase['enabled']
+ for rule in src_rulebase['rulebase']:
+ if 'type' in rule:
if rule['type'] == 'place-holder': # add domain rules
section_name = ""
if 'name' in src_rulebase:
section_name = rule['name']
- add_domain_rule_header_rule_in_json(
+ rule_num = add_domain_rule_header_rule(
target_rulebase, section_name, layer_name, import_id, rule['uid'], rule_num, section_header_uids, parent_uid)
else: # parse standard sections
- parse_single_rule_to_json(
+ rule_num = parse_single_rule(
rule, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import, debug_level=debug_level)
- rule_num += 1
-
- if src_rulebase['type'] == 'place-holder': # add domain rules
- logger.debug('found domain rule ref: ' + src_rulebase['uid'])
- section_name = ""
- if 'name' in src_rulebase:
- section_name = src_rulebase['name']
- add_domain_rule_header_rule_in_json(
- target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid)
- rule_num += 1
- if 'rule-number' in src_rulebase: # rulebase is just a single rule
- parse_single_rule_to_json(
- src_rulebase, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import)
- rule_num += 1
+            if 'rulebase' in rule: # always check if a rule contains another layer
+ rule_num = parse_rulebase(rule, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
+
+ if 'type' in src_rulebase and src_rulebase['type'] == 'place-holder': # add domain rules
+ logger.debug('found domain rule ref: ' + src_rulebase['uid'])
+ section_name = ""
+ if 'name' in src_rulebase:
+ section_name = src_rulebase['name']
+ rule_num = add_domain_rule_header_rule(
+ target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid)
+
+ if 'rule-number' in src_rulebase: # rulebase is just a single rule
+ rule_num = parse_single_rule(src_rulebase, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import)
+
return rule_num
-def parse_nat_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1):
+def parse_nat_rulebase(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=0, recursion_level=1):
if (recursion_level > fwo_const.max_recursion_level):
raise ImportRecursionLimitReached(
@@ -318,39 +371,29 @@ def parse_nat_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id
for chunk in src_rulebase['nat_rule_chunks']:
if 'rulebase' in chunk:
for rules_chunk in chunk['rulebase']:
- rule_num = parse_nat_rulebase_json(rules_chunk, target_rulebase, layer_name, import_id, rule_num,
+ rule_num = parse_nat_rulebase(rules_chunk, target_rulebase, layer_name, import_id, rule_num,
section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
else:
logger.warning(
"parse_rule: found no rulebase in chunk:\n" + json.dumps(chunk, indent=2))
else:
if 'rulebase' in src_rulebase:
- # add section header, but only if it does not exist yet (can happen by chunking a section)
- if src_rulebase['type'] == 'access-section' and not src_rulebase['uid'] in section_header_uids:
- section_name = ""
- if 'name' in src_rulebase:
- section_name = src_rulebase['name']
- parent_uid = ""
- add_section_header_rule_in_json(target_rulebase, section_name, layer_name,
- import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid)
- rule_num += 1
- parent_uid = src_rulebase['uid']
+ check_and_add_section_header(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, config2import, debug_level=debug_level, recursion_level=recursion_level+1)
+
for rule in src_rulebase['rulebase']:
(rule_match, rule_xlate) = parse_nat_rule_transform(rule, rule_num)
- parse_single_rule_to_json(
+ rule_num = parse_single_rule(
rule_match, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import)
- parse_single_rule_to_json(
+ parse_single_rule( # do not increase rule_num here
rule_xlate, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import)
- rule_num += 1
- if 'rule-number' in src_rulebase: # rulebase is just a single rule
+ if 'rule-number' in src_rulebase: # rulebase is just a single rule (xlate rules do not count)
(rule_match, rule_xlate) = parse_nat_rule_transform(
src_rulebase, rule_num)
- parse_single_rule_to_json(
+ rule_num = parse_single_rule(
rule_match, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import)
- parse_single_rule_to_json(
+ parse_single_rule( # do not increase rule_num here (xlate rules do not count)
rule_xlate, target_rulebase, layer_name, import_id, rule_num, parent_uid, config2import)
- rule_num += 1
return rule_num
@@ -392,3 +435,4 @@ def parse_nat_rule_transform(xlate_rule_in, rule_num):
'rule_type': 'xlate'
}
return (rule_match, rule_xlate)
+
diff --git a/roles/importer/files/importer/checkpointR8x/parse_service.py b/roles/importer/files/importer/checkpointR8x/cp_service.py
similarity index 98%
rename from roles/importer/files/importer/checkpointR8x/parse_service.py
rename to roles/importer/files/importer/checkpointR8x/cp_service.py
index 2302c8e8b..294ac93be 100644
--- a/roles/importer/files/importer/checkpointR8x/parse_service.py
+++ b/roles/importer/files/importer/checkpointR8x/cp_service.py
@@ -121,7 +121,7 @@ def add_member_names_for_svc_group(idx, svc_objects):
svc_objects.insert(idx, group)
-def parse_service_objects_to_json(full_config, config2import, import_id, debug_level=0):
+def normalize_service_objects(full_config, config2import, import_id, debug_level=0):
svc_objects = []
for svc_table in full_config['object_tables']:
collect_svc_objects(svc_table, svc_objects)
diff --git a/roles/importer/files/importer/checkpointR8x/parse_user.py b/roles/importer/files/importer/checkpointR8x/cp_user.py
similarity index 99%
rename from roles/importer/files/importer/checkpointR8x/parse_user.py
rename to roles/importer/files/importer/checkpointR8x/cp_user.py
index 0551e8ed3..c92fc49a9 100644
--- a/roles/importer/files/importer/checkpointR8x/parse_user.py
+++ b/roles/importer/files/importer/checkpointR8x/cp_user.py
@@ -52,9 +52,8 @@ def collect_users_from_rulebase(rulebase, users):
for rule in rulebase:
collect_users_from_rule(rule, users)
-# the following is only used within new python-only importer:
-
+# the following is only used within new python-only importer:
def parse_user_objects_from_rulebase(rulebase, users, import_id):
collect_users_from_rulebase(rulebase, users)
for user_name in users.keys():
diff --git a/roles/importer/files/importer/checkpointR8x/cpcommon.py b/roles/importer/files/importer/checkpointR8x/cpcommon.py
deleted file mode 100644
index b04a8c264..000000000
--- a/roles/importer/files/importer/checkpointR8x/cpcommon.py
+++ /dev/null
@@ -1,344 +0,0 @@
-from distutils.log import debug
-import sys
-from common import importer_base_dir
-from fwo_log import getFwoLogger
-sys.path.append(importer_base_dir + '/checkpointR8x')
-import json
-import time
-import getter
-import fwo_alert, fwo_api
-import ipaddress
-import fwo_globals
-import cp_const
-from cp_const import details_level
-
-
-def validate_ip_address(address):
- try:
- # ipaddress.ip_address(address)
- ipaddress.ip_network(address)
- return True
- # print("IP address {} is valid. The object returned is {}".format(address, ip))
- except ValueError:
- return False
- # print("IP address {} is not valid".format(address))
-
-
-def get_ip_of_obj(obj, mgm_id=None):
- if 'ipv4-address' in obj:
- ip_addr = obj['ipv4-address']
- elif 'ipv6-address' in obj:
- ip_addr = obj['ipv6-address']
- elif 'subnet4' in obj:
- ip_addr = obj['subnet4'] + '/' + str(obj['mask-length4'])
- elif 'subnet6' in obj:
- ip_addr = obj['subnet6'] + '/' + str(obj['mask-length6'])
- elif 'ipv4-address-first' in obj and 'ipv4-address-last' in obj:
- ip_addr = obj['ipv4-address-first'] + '-' + str(obj['ipv4-address-last'])
- elif 'ipv6-address-first' in obj and 'ipv6-address-last' in obj:
- ip_addr = obj['ipv6-address-first'] + '-' + str(obj['ipv6-address-last'])
- else:
- ip_addr = None
-
- ## fix malformed ip addresses (should not regularly occur and constitutes a data issue in CP database)
- if ip_addr is None or ('type' in obj and (obj['type'] == 'address-range' or obj['type'] == 'multicast-address-range')):
- pass # ignore None and ranges here
- elif not validate_ip_address(ip_addr):
- alerter = fwo_alert.getFwoAlerter()
- alert_description = "object is not a valid ip address (" + str(ip_addr) + ")"
- fwo_api.create_data_issue(alerter['fwo_api_base_url'], alerter['jwt'], severity=2, obj_name=obj['name'], object_type=obj['type'], description=alert_description, mgm_id=mgm_id)
- alert_description = "object '" + obj['name'] + "' (type=" + obj['type'] + ") is not a valid ip address (" + str(ip_addr) + ")"
- fwo_api.setAlert(alerter['fwo_api_base_url'], alerter['jwt'], title="import error", severity=2, role='importer', \
- description=alert_description, source='import', alertCode=17, mgm_id=mgm_id)
- ip_addr = '0.0.0.0/32' # setting syntactically correct dummy ip
- return ip_addr
-
-##################### 2nd-level functions ###################################
-
-def get_basic_config (config_json, mgm_details, force=False, config_filename=None,
- limit=150, details_level=cp_const.details_level, test_version='off', debug_level=0, ssl_verification=True, sid=None):
- logger = getFwoLogger()
-
- api_host = mgm_details['hostname']
- api_user = mgm_details['import_credential']['user']
- if mgm_details['domainUid'] != None:
- api_domain = mgm_details['domainUid']
- else:
- api_domain = mgm_details['configPath']
- api_port = str(mgm_details['port'])
- api_password = mgm_details['import_credential']['secret']
- base_url = 'https://' + api_host + ':' + str(api_port) + '/web_api/'
-
- # top level dict start, sid contains the domain information, so only sending domain during login
- if sid is None: # if sid was not passed, login and get it
- sid = getter.login(api_user,api_password,api_host,api_port,api_domain,ssl_verification)
- v_url = getter.get_api_url (sid, api_host, api_port, api_user, base_url, limit, test_version, ssl_verification, debug_level=debug_level)
-
- config_json.update({'rulebases': [], 'nat_rulebases': [] })
-
- with_hits = True
- show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'show-hits' : with_hits}
-
- # read all rulebases: handle per device details
- for device in mgm_details['devices']:
- if device['global_rulebase_name'] != None and device['global_rulebase_name']!='':
- show_params_rules['name'] = device['global_rulebase_name']
- # get global layer rulebase
- logger.debug ( "getting layer: " + show_params_rules['name'] )
- current_layer_json = getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['global_rulebase_name'])
- if current_layer_json is None:
- return 1
- # now also get domain rules
- show_params_rules['name'] = device['local_rulebase_name']
- current_layer_json['layername'] = device['local_rulebase_name']
- logger.debug ( "getting domain rule layer: " + show_params_rules['name'] )
- domain_rules = getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name'])
- if current_layer_json is None:
- return 1
-
- # now handling possible reference to domain rules within global rules
- # if we find the reference, replace it with the domain rules
- if 'layerchunks' in current_layer_json:
- for chunk in current_layer_json["layerchunks"]:
- for rule in chunk['rulebase']:
- if "type" in rule and rule["type"] == "place-holder":
- logger.debug ("found domain rules place-holder: " + str(rule) + "\n\n")
- current_layer_json = getter.insert_layer_after_place_holder(current_layer_json, domain_rules, rule['uid'])
- else: # no global rules, just get local ones
- show_params_rules['name'] = device['local_rulebase_name']
- logger.debug ( "getting layer: " + show_params_rules['name'] )
- current_layer_json = getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name'])
- if current_layer_json is None:
- return 1
-
- config_json['rulebases'].append(current_layer_json)
-
- # getting NAT rules - need package name for nat rule retrieval
- # todo: each gateway/layer should have its own package name (pass management details instead of single data?)
- if device['package_name'] != None and device['package_name'] != '':
- show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'package': device['package_name'] }
- if debug_level>3:
- logger.debug ( "getting nat rules for package: " + device['package_name'] )
- nat_rules = getter.get_nat_rules_from_api_as_dict (api_host, api_port, v_url, sid, show_params_rules)
- if len(nat_rules)>0:
- config_json['nat_rulebases'].append(nat_rules)
- else:
- config_json['nat_rulebases'].append({ "nat_rule_chunks": [] })
- else: # always making sure we have an (even empty) nat rulebase per device
- config_json['nat_rulebases'].append({ "nat_rule_chunks": [] })
-
- # leaving rules, moving on to objects
- config_json["object_tables"] = []
- show_params_objs = {'limit':limit,'details-level': cp_const.details_level}
-
- for obj_type in cp_const.api_obj_types:
- object_table = { "object_type": obj_type, "object_chunks": [] }
- current=0
- total=current+1
- show_cmd = 'show-' + obj_type
- if debug_level>5:
- logger.debug ( "obj_type: "+ obj_type )
-        while (current<total) :
-            show_params_objs['offset']=current
-            objects = getter.cp_api_call(v_url, show_cmd, show_params_objs, sid)
-            object_table["object_chunks"].append(objects)
-            if 'total' in objects and 'to' in objects:
-                total=objects['total']
-                current=objects['to']
-                if debug_level>5:
-                    logger.debug ( obj_type +" current:"+ str(current) + " of a total " + str(total) )
-            else :
-                current = total
-                if debug_level>5:
-                    logger.debug ( obj_type +" total:"+ str(total) )
- config_json["object_tables"].append(object_table)
- logout_result = getter.cp_api_call(v_url, 'logout', {}, sid)
-
- # only write config to file if config_filename is given
- if config_filename != None and len(config_filename)>1:
- with open(config_filename, "w") as configfile_json:
- configfile_json.write(json.dumps(config_json))
- return 0
-
-
-################# enrich #######################
-def enrich_config (config, mgm_details, limit=150, details_level=cp_const.details_level, noapi=False, sid=None):
-
- logger = getFwoLogger()
- base_url = 'https://' + mgm_details['hostname'] + ':' + str(mgm_details['port']) + '/web_api/'
- nw_objs_from_obj_tables = []
- svc_objs_from_obj_tables = []
- starttime = int(time.time())
-
- # do nothing for empty configs
- if config == {}:
- return 0
-
- #################################################################################
- # adding inline and domain layers
- found_new_inline_layers = True
- old_inline_layers = []
- while found_new_inline_layers:
- # sweep existing rules for inline layer links
- inline_layers = []
- for rulebase in config['rulebases'] + config['nat_rulebases']:
- getter.get_inline_layer_names_from_rulebase(rulebase, inline_layers)
-
- if len(inline_layers) == len(old_inline_layers):
- found_new_inline_layers = False
- else:
- old_inline_layers = inline_layers
- for layer in inline_layers:
- if fwo_globals.debug_level>5:
- logger.debug ( "found inline layer " + layer )
- # enrich config --> get additional layers referenced in top level layers by name
- # also handle possible recursion (inline layer containing inline layer(s))
- # get layer rules from api
- # add layer rules to config
-
- # next phase: how to logically link layer guard with rules in layer? --> AND of src, dst & svc between layer guard and each rule in layer?
-
- #################################################################################
- # get object data which is only contained as uid in config by making additional api calls
- # get all object uids (together with type) from all rules in fields src, dst, svc
- nw_uids_from_rulebase = []
- svc_uids_from_rulebase = []
-
- for rulebase in config['rulebases'] + config['nat_rulebases']:
- if fwo_globals.debug_level>5:
- if 'layername' in rulebase:
- logger.debug ( "Searching for all uids in rulebase: " + rulebase['layername'] )
- getter.collect_uids_from_rulebase(rulebase, nw_uids_from_rulebase, svc_uids_from_rulebase, "top_level")
-
- # remove duplicates from uid lists
- nw_uids_from_rulebase = list(set(nw_uids_from_rulebase))
- svc_uids_from_rulebase = list(set(svc_uids_from_rulebase))
-
- # get all uids in objects tables
- for obj_table in config['object_tables']:
- nw_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, cp_const.nw_obj_table_names))
- svc_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, cp_const.svc_obj_table_names))
-
- # identify all objects (by type) that are missing in objects tables but present in rulebase
- missing_nw_object_uids = getter.get_broken_object_uids(nw_objs_from_obj_tables, nw_uids_from_rulebase)
- missing_svc_object_uids = getter.get_broken_object_uids(svc_objs_from_obj_tables, svc_uids_from_rulebase)
-
- # adding the uid of the Original object for natting:
- missing_nw_object_uids.append(cp_const.original_obj_uid)
- missing_svc_object_uids.append(cp_const.original_obj_uid)
-
- if fwo_globals.debug_level>4:
- logger.debug ( "found missing nw objects: '" + ",".join(missing_nw_object_uids) + "'" )
- logger.debug ( "found missing svc objects: '" + ",".join(missing_svc_object_uids) + "'" )
-
- if noapi == False:
- # if sid is None:
- # TODO: why is the re-genereation of a new sid necessary here?
-
- if mgm_details['domainUid'] != None:
- api_domain = mgm_details['domainUid']
- else:
- api_domain = mgm_details['configPath']
-
- sid = getter.login(mgm_details['import_credential']['user'],mgm_details['import_credential']['secret'],mgm_details['hostname'],mgm_details['port'],api_domain)
- logger.debug ( "re-logged into api" )
-
- # if an object is not there:
- # make api call: show object details-level full uid "" and add object to respective json
- for missing_obj in missing_nw_object_uids:
- show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj}
- logger.debug ( "fetching obj with uid: " + missing_obj)
- obj = getter.cp_api_call(base_url, 'show-object', show_params_host, sid)
- if 'object' in obj:
- obj = obj['object']
- if (obj['type'] == 'CpmiAnyObject'):
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': 'any nw object checkpoint (hard coded)',
- 'type': 'CpmiAnyObject', 'ipv4-address': '0.0.0.0/0',
- } ] } ] }
- config['object_tables'].append(json_obj)
- elif (obj['type'] == 'simple-gateway' or obj['type'] == 'CpmiGatewayPlain' or obj['type'] == 'interop'):
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': obj['comments'], 'type': 'host', 'ipv4-address': get_ip_of_obj(obj),
- } ] } ] }
- config['object_tables'].append(json_obj)
- elif obj['type'] == 'multicast-address-range':
- logger.debug("found multicast-address-range: " + obj['name'] + " (uid:" + obj['uid']+ ")")
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': obj['comments'], 'type': 'host', 'ipv4-address': get_ip_of_obj(obj),
- } ] } ] }
- config['object_tables'].append(json_obj)
- elif (obj['type'] == 'CpmiVsClusterMember' or obj['type'] == 'CpmiVsxClusterMember'):
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': obj['comments'], 'type': 'host', 'ipv4-address': get_ip_of_obj(obj),
- } ] } ] }
- config['object_tables'].append(json_obj)
- logger.debug ('missing obj: ' + obj['name'] + obj['type'])
- elif (obj['type'] == 'Global'):
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': obj['comments'], 'type': 'host', 'ipv4-address': '0.0.0.0/0',
- } ] } ] }
- config['object_tables'].append(json_obj)
- logger.debug ('missing obj: ' + obj['name'] + obj['type'])
- elif (obj['type'] == 'updatable-object'):
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': obj['comments'], 'type': 'group' #, 'ipv4-address': '0.0.0.0/0',
- } ] } ] }
- config['object_tables'].append(json_obj)
- logger.debug ('missing obj: ' + obj['name'] + obj['type'])
- elif (obj['type'] == 'Internet'):
- json_obj = {"object_type": "hosts", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': obj['comments'], 'type': 'network', 'ipv4-address': '0.0.0.0/0',
- } ] } ] }
- config['object_tables'].append(json_obj)
- elif (obj['type'] == 'access-role'):
- pass # ignorning user objects
- else:
- logger.warning ( "missing nw obj of unexpected type '" + obj['type'] + "': " + missing_obj )
- logger.debug ( "missing nw obj: " + missing_obj + " added" )
- else:
- logger.warning("could not get the missing object with uid=" + missing_obj + " from CP API")
-
- for missing_obj in missing_svc_object_uids:
- show_params_host = {'details-level':cp_const.details_level,'uid':missing_obj}
- obj = getter.cp_api_call(base_url, 'show-object', show_params_host, sid)
- obj = obj['object']
- if (obj['type'] == 'CpmiAnyObject'):
- json_obj = {"object_type": "services-other", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': 'any svc object checkpoint (hard coded)',
- 'type': 'service-other', 'ip-protocol': '0'
- } ] } ] }
- config['object_tables'].append(json_obj)
- elif (obj['type'] == 'Global'):
- json_obj = {"object_type": "services-other", "object_chunks": [ {
- "objects": [ {
- 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'],
- 'comments': 'Original svc object checkpoint (hard coded)',
- 'type': 'service-other', 'ip-protocol': '0'
- } ] } ] }
- config['object_tables'].append(json_obj)
- else:
- logger.warning ( "missing svc obj (uid=" + missing_obj + ") of unexpected type \"" + obj['type'] +"\"" )
- logger.debug ( "missing svc obj: " + missing_obj + " added")
-
- logout_result = getter.cp_api_call(base_url, 'logout', {}, sid)
-
- logger.debug ( "checkpointR8x/enrich_config - duration: " + str(int(time.time()) - starttime) + "s" )
-
- return 0
diff --git a/roles/importer/files/importer/checkpointR8x/fwcommon.py b/roles/importer/files/importer/checkpointR8x/fwcommon.py
index 66d49b24a..ac6a80db3 100644
--- a/roles/importer/files/importer/checkpointR8x/fwcommon.py
+++ b/roles/importer/files/importer/checkpointR8x/fwcommon.py
@@ -1,28 +1,28 @@
-from distutils.log import debug
import sys
+import json
+import copy
from common import importer_base_dir
from fwo_log import getFwoLogger
sys.path.append(importer_base_dir + '/checkpointR8x')
-import copy, time
-import cp_const, parse_network, parse_rule, parse_service, parse_user
-import getter
-from cpcommon import get_basic_config, enrich_config
+import time
import fwo_globals
-from fwo_exception import FwLoginFailed
+import cp_rule
+import cp_const, cp_network, cp_service
+import cp_getter
+from cp_enrich import enrich_config
+from fwo_exception import FwLoginFailed, FwLogoutFailed
+from cp_user import parse_user_objects_from_rulebase
def has_config_changed (full_config, mgm_details, force=False):
if full_config != {}: # a native config was passed in, so we assume that an import has to be done (simulating changes here)
return 1
- # from 5.8 onwards: preferably use domain uid instead of domain name due to CP R81 bug with certain installations
- if mgm_details['domainUid'] != None:
- domain = mgm_details['domainUid']
- else:
- domain = mgm_details['configPath']
+
+ domain, _ = prepare_get_vars(mgm_details)
try: # top level dict start, sid contains the domain information, so only sending domain during login
- session_id = getter.login(mgm_details['import_credential']['user'], mgm_details['import_credential']['secret'], mgm_details['hostname'], str(mgm_details['port']), domain)
+ session_id = login_cp(mgm_details, domain)
except:
        raise FwLoginFailed # maybe "Temporary failure in name resolution"
@@ -34,10 +34,15 @@ def has_config_changed (full_config, mgm_details, force=False):
if last_change_time==None or last_change_time=='' or force:
# if no last import time found or given or if force flag is set, do full import
- return 1
- else:
- # otherwise search for any changes since last import
- return (getter.get_changes(session_id, mgm_details['hostname'], str(mgm_details['port']),last_change_time) != 0)
+ result = 1
+ else: # otherwise search for any changes since last import
+ result = (cp_getter.get_changes(session_id, mgm_details['hostname'], str(mgm_details['port']),last_change_time) != 0)
+
+ try: # top level dict start, sid contains the domain information, so only sending domain during login
+ logout_result = cp_getter.cp_api_call("https://" + mgm_details['hostname'] + ":" + str(mgm_details['port']) + "/web_api/", 'logout', {}, session_id)
+ except:
+        raise FwLogoutFailed # maybe "Temporary failure in name resolution"
+ return result
def get_config(config2import, full_config, current_import_id, mgm_details, limit=150, force=False, jwt=None):
@@ -50,18 +55,20 @@ def get_config(config2import, full_config, current_import_id, mgm_details, limit
if not parsing_config_only: # get config from cp fw mgr
starttime = int(time.time())
- # from 5.8 onwards: preferably use domain uid instead of domain name due to CP R81 bug with certain installations
- if mgm_details['domainUid'] != None:
- domain = mgm_details['domainUid']
- else:
- domain = mgm_details['configPath']
+ if 'users' not in full_config:
+ full_config.update({'users': {}})
+
+ domain, base_url = prepare_get_vars(mgm_details)
- sid = getter.login(mgm_details['import_credential']['user'], mgm_details['import_credential']['secret'], mgm_details['hostname'], str(mgm_details['port']), domain)
+ sid = login_cp(mgm_details, domain)
- result_get_basic_config = get_basic_config (full_config, mgm_details, force=force, sid=sid, limit=str(limit), details_level=cp_const.details_level, test_version='off')
+ result_get_rules = get_rules (full_config, mgm_details, base_url, sid, force=force, limit=str(limit), details_level=cp_const.details_level, test_version='off')
+ if result_get_rules>0:
+ return result_get_rules
- if result_get_basic_config>0:
- return result_get_basic_config
+ result_get_objects = get_objects (full_config, mgm_details, base_url, sid, force=force, limit=str(limit), details_level=cp_const.details_level, test_version='off')
+ if result_get_objects>0:
+ return result_get_objects
result_enrich_config = enrich_config (full_config, mgm_details, limit=str(limit), details_level=cp_const.details_level, sid=sid)
@@ -71,47 +78,150 @@ def get_config(config2import, full_config, current_import_id, mgm_details, limit
duration = int(time.time()) - starttime
logger.debug ( "checkpointR8x/get_config - duration: " + str(duration) + "s" )
- if full_config == {}: # no changes
- return 0
+ cp_network.normalize_network_objects(full_config, config2import, current_import_id, mgm_id=mgm_details['id'])
+ cp_service.normalize_service_objects(full_config, config2import, current_import_id)
+ parse_users_from_rulebases(full_config, full_config['rulebases'], full_config['users'], config2import, current_import_id)
+ config2import.update({'rules': cp_rule.normalize_rulebases_top_level(full_config, current_import_id, config2import) })
+ if not parsing_config_only: # get config from cp fw mgr
+ try: # logout
+ logout_result = cp_getter.cp_api_call("https://" + mgm_details['hostname'] + ":" + str(mgm_details['port']) + "/web_api/", 'logout', {}, sid)
+ except:
+        raise FwLogoutFailed # maybe "Temporary failure in name resolution"
+ return 0
+
+
+def prepare_get_vars(mgm_details):
+
+ # from 5.8 onwards: preferably use domain uid instead of domain name due to CP R81 bug with certain installations
+ if mgm_details['domainUid'] != None:
+ domain = mgm_details['domainUid']
else:
- parse_network.parse_network_objects_to_json(full_config, config2import, current_import_id, mgm_id=mgm_details['id'])
- parse_service.parse_service_objects_to_json(full_config, config2import, current_import_id)
- if 'users' not in full_config:
- full_config.update({'users': {}})
- target_rulebase = []
- rule_num = 0
- parent_uid=""
- section_header_uids=[]
- rb_range = range(len(full_config['rulebases']))
- for rb_id in rb_range:
- parse_user.parse_user_objects_from_rulebase(
- full_config['rulebases'][rb_id], full_config['users'], current_import_id)
- # if current_layer_name == args.rulebase:
- if fwo_globals.debug_level>3:
- logger.debug("parsing layer " + full_config['rulebases'][rb_id]['layername'])
-
- # parse access rules
- rule_num = parse_rule.parse_rulebase_json(
- full_config['rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'],
- current_import_id, rule_num, section_header_uids, parent_uid, config2import)
- # now parse the nat rulebase
-
- # parse nat rules
- if len(full_config['nat_rulebases'])>0:
- if len(full_config['nat_rulebases']) != len(rb_range):
- logger.warning('get_config - found ' + str(len(full_config['nat_rulebases'])) +
- ' nat rulebases and ' + str(len(rb_range)) + ' access rulebases')
- else:
- rule_num = parse_rule.parse_nat_rulebase_json(
- full_config['nat_rulebases'][rb_id], target_rulebase, full_config['rulebases'][rb_id]['layername'],
- current_import_id, rule_num, section_header_uids, parent_uid, config2import)
- config2import.update({'rules': target_rulebase})
-
- # copy users from full_config to config2import
- # also converting users from dict to array:
- config2import.update({'user_objects': []})
- for user_name in full_config['users'].keys():
- user = copy.deepcopy(full_config['users'][user_name])
- user.update({'user_name': user_name})
- config2import['user_objects'].append(user)
+ domain = mgm_details['configPath']
+ api_host = mgm_details['hostname']
+ api_user = mgm_details['import_credential']['user']
+ if mgm_details['domainUid'] != None:
+ api_domain = mgm_details['domainUid']
+ else:
+ api_domain = mgm_details['configPath']
+ api_port = str(mgm_details['port'])
+ api_password = mgm_details['import_credential']['secret']
+ base_url = 'https://' + api_host + ':' + str(api_port) + '/web_api/'
+
+ return domain, base_url
+
+
+def login_cp(mgm_details, domain, ssl_verification=True):
+ return cp_getter.login(mgm_details['import_credential']['user'], mgm_details['import_credential']['secret'], mgm_details['hostname'], str(mgm_details['port']), domain)
+
+
+def get_rules (config_json, mgm_details, v_url, sid, force=False, config_filename=None,
+ limit=150, details_level=cp_const.details_level, test_version='off', debug_level=0, ssl_verification=True):
+
+ logger = getFwoLogger()
+ config_json.update({'rulebases': [], 'nat_rulebases': [] })
+ with_hits = True
+ show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'show-hits' : with_hits}
+
+ # read all rulebases: handle per device details
+ for device in mgm_details['devices']:
+ if device['global_rulebase_name'] != None and device['global_rulebase_name']!='':
+ show_params_rules['name'] = device['global_rulebase_name']
+ # get global layer rulebase
+ logger.debug ( "getting layer: " + show_params_rules['name'] )
+ current_layer_json = cp_getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['global_rulebase_name'])
+ if current_layer_json is None:
+ return 1
+ # now also get domain rules
+ show_params_rules['name'] = device['local_rulebase_name']
+ current_layer_json['layername'] = device['local_rulebase_name']
+ logger.debug ( "getting domain rule layer: " + show_params_rules['name'] )
+ domain_rules = cp_getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name'])
+ if current_layer_json is None:
+ return 1
+
+ # now handling possible reference to domain rules within global rules
+ # if we find the reference, replace it with the domain rules
+ if 'layerchunks' in current_layer_json:
+ for chunk in current_layer_json["layerchunks"]:
+ for rule in chunk['rulebase']:
+ if "type" in rule and rule["type"] == "place-holder":
+ logger.debug ("found domain rules place-holder: " + str(rule) + "\n\n")
+ current_layer_json = cp_getter.insert_layer_after_place_holder(current_layer_json, domain_rules, rule['uid'])
+ else: # no global rules, just get local ones
+ show_params_rules['name'] = device['local_rulebase_name']
+ logger.debug ( "getting layer: " + show_params_rules['name'] )
+ current_layer_json = cp_getter.get_layer_from_api_as_dict (v_url, sid, show_params_rules, layername=device['local_rulebase_name'])
+ if current_layer_json is None:
+ return 1
+
+ config_json['rulebases'].append(current_layer_json)
+
+ # getting NAT rules - need package name for nat rule retrieval
+ # todo: each gateway/layer should have its own package name (pass management details instead of single data?)
+ if device['package_name'] != None and device['package_name'] != '':
+ show_params_rules = {'limit':limit,'use-object-dictionary':cp_const.use_object_dictionary,'details-level':cp_const.details_level, 'package': device['package_name'] }
+ if debug_level>3:
+ logger.debug ( "getting nat rules for package: " + device['package_name'] )
+ nat_rules = cp_getter.get_nat_rules_from_api_as_dict (v_url, sid, show_params_rules)
+ if len(nat_rules)>0:
+ config_json['nat_rulebases'].append(nat_rules)
+ else:
+ config_json['nat_rulebases'].append({ "nat_rule_chunks": [] })
+ else: # always making sure we have an (even empty) nat rulebase per device
+ config_json['nat_rulebases'].append({ "nat_rule_chunks": [] })
+ return 0
+
+
+def get_objects(config_json, mgm_details, v_url, sid, force=False, config_filename=None,
+ limit=150, details_level=cp_const.details_level, test_version='off', debug_level=0, ssl_verification=True):
+
+ logger = getFwoLogger()
+
+ config_json["object_tables"] = []
+ show_params_objs = {'limit':limit,'details-level': cp_const.details_level}
+
+ for obj_type in cp_const.api_obj_types:
+ object_table = { "object_type": obj_type, "object_chunks": [] }
+ current=0
+ total=current+1
+ show_cmd = 'show-' + obj_type
+ if debug_level>5:
+ logger.debug ( "obj_type: "+ obj_type )
+        while (current<total) :
+            show_params_objs['offset']=current
+            objects = cp_getter.cp_api_call(v_url, show_cmd, show_params_objs, sid)
+            object_table["object_chunks"].append(objects)
+            if 'total' in objects and 'to' in objects:
+                total=objects['total']
+                current=objects['to']
+                if debug_level>5:
+                    logger.debug ( obj_type +" current:"+ str(current) + " of a total " + str(total) )
+            else :
+                current = total
+                if debug_level>5:
+                    logger.debug ( obj_type +" total:"+ str(total) )
+ config_json["object_tables"].append(object_table)
+ # logout_result = cp_getter.cp_api_call(v_url, 'logout', {}, sid)
+
+ # only write config to file if config_filename is given
+ if config_filename != None and len(config_filename)>1:
+ with open(config_filename, "w") as configfile_json:
+ configfile_json.write(json.dumps(config_json))
return 0
+
+
+def parse_users_from_rulebases (full_config, rulebase, users, config2import, current_import_id):
+ if 'users' not in full_config:
+ full_config.update({'users': {}})
+
+ rb_range = range(len(full_config['rulebases']))
+ for rb_id in rb_range:
+ parse_user_objects_from_rulebase (full_config['rulebases'][rb_id], full_config['users'], current_import_id)
+
+ # copy users from full_config to config2import
+ # also converting users from dict to array:
+ config2import.update({'user_objects': []})
+ for user_name in full_config['users'].keys():
+ user = copy.deepcopy(full_config['users'][user_name])
+ user.update({'user_name': user_name})
+ config2import['user_objects'].append(user)
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_api-test-call.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_api-test-call.py
deleted file mode 100755
index a9253a98c..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_api-test-call.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/python3
-import logging, logging.config
-import json, argparse
-import sys
-from common import importer_base_dir, set_ssl_verification
-sys.path.append(importer_base_dir)
-import getter
-
-logging.config.fileConfig(fname='discovery_logging.conf', disable_existing_loggers=False)
-
-logger = logging.getLogger(__name__)
-
-logger.info("START")
-parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls')
-parser.add_argument('-a', '--hostname', metavar='api_host', required=True, help='Check Point R8x management server')
-parser.add_argument('-w', '--password', metavar='api_password', required=True, help='password for management server')
-parser.add_argument('-m', '--mode', metavar='mode', required=True, help='[domains|packages|layers|generic]')
-parser.add_argument('-c', '--command', metavar='command', required=False, help='generic command to send to the api (needs -m generic). ' +
- 'Please note that the command must be written as one word (e.g. show-access-layer instead of show acess-layers).')
-parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch')
-parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443')
-parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment')
-parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off')
-parser.add_argument('-l', '--level', metavar='level_of_detail', default='standard', help='[standard|full]')
-parser.add_argument('-i', '--limit', metavar='api_limit', default='150', help='The maximal number of returned results per HTTPS Connection; default=150')
-parser.add_argument('-n', '--nolimit', metavar='nolimit', default='off', help='[on|off] Set to on if (generic) command does not understand limit switch')
-parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0')
-parser.add_argument('-V', '--version', metavar='api_version', default='off', help='alternate API version [off|]; default=off')
-
-args = parser.parse_args()
-if len(sys.argv)==1:
- parser.print_help(sys.stderr)
- sys.exit(1)
-
-domain = args.domain
-
-if args.mode == 'packages':
- api_command='show-packages'
- api_details_level="standard"
-elif args.mode == 'domains' or args.mode == 'devices':
- api_command='show-domains'
- api_details_level="standard"
- domain = ''
-elif args.mode == 'layers':
- api_command='show-access-layers'
- api_details_level="standard"
-elif args.mode == 'generic':
- api_command=args.command
- api_details_level=args.level
-else:
- sys.exit("\"" + args.mode +"\" - unknown mode")
-
-offset = 0
-use_object_dictionary = 'false'
-base_url = 'https://' + args.hostname + ':' + args.port + '/web_api/'
-ssl_verification = set_ssl_verification(args.ssl)
-logger = logging.getLogger(__name__)
-
-xsid = getter.login(args.user, args.password, args.hostname, args.port, domain, ssl_verification)
-api_versions = getter.cp_api_call(args.hostname, args.port, base_url, 'show-api-versions', {}, xsid, ssl_verification=ssl_verification)
-
-api_version = api_versions["current-version"]
-api_supported = api_versions["supported-versions"]
-v_url = getter.set_api_url(base_url,args.version,api_supported,args.hostname)
-if args.version != 'off':
- api_version = args.version
-logger.debug ("using current version: "+ api_version )
-logger.debug ("supported versions: "+ ', '.join(api_supported) )
-logger.debug ("limit:"+ args.limit )
-logger.debug ("Domain:"+ args.domain )
-logger.debug ("login:"+ args.user )
-logger.debug ("sid:"+ xsid )
-
-payload = { "details-level" : api_details_level }
-if args.nolimit == 'off':
- payload.update( { "limit" : args.limit, "offset" : offset } )
-
-if args.mode == 'generic': # need to divide command string into command and payload (i.e. parameters)
- cmd_parts = api_command.split(" ")
- api_command = cmd_parts[0]
- idx = 1
- if len(cmd_parts)>1:
- payload.pop('limit')
- payload.pop('offset')
- while idx < len(cmd_parts):
- payload.update({cmd_parts[idx]: cmd_parts[idx+1]})
- idx += 2
-
-result = getter.cp_api_call(args.hostname, args.port, v_url, api_command, payload, xsid, ssl_verification=ssl_verification)
-
-if args.debug == "1" or args.debug == "3":
- print ("\ndump of result:\n" + json.dumps(result, indent=4))
-if args.mode == 'packages':
- print ("\nthe following packages exist on management server:")
- for p in result['packages']:
- print (" package: " + p['name'])
- if "access-layers" in result:
- print ("the following layers exist on management server:")
- for p in result['packages']:
- print (" package: " + p['name'])
- for l in p['access-layers']:
- print (" layer: " + l['name'])
-
-if args.mode == 'domains':
- print ("\nthe following domains exist on management server:")
- for d in result['objects']:
- print (" domain: " + d['name'] + ", uid: " + d['uid'])
-if args.mode == 'layers':
- print ("\nthe following access-layers exist on management server:")
- for l in result['access-layers']:
- print (" access-layer: " + l['name'] + ", uid: " + l['uid'] )
-if args.mode == 'generic':
- print (json.dumps(result, indent=3))
-
-logout_result = getter.cp_api_call(args.hostname, args.port, v_url, 'logout', {}, xsid, ssl_verification=ssl_verification)
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_auto-discover.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_auto-discover.py
deleted file mode 100755
index 6c2e043dd..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_auto-discover.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/python3
-import sys
-# from .. common import importer_base_dir
-sys.path.append('..')
-import logging, logging.config
-import getter
-import json, argparse, sys
-import fwo_log
-logging.config.fileConfig(fname='discovery_logging.conf', disable_existing_loggers=False)
-
-logger = logging.getLogger(__name__)
-
-logger.info("START")
-parser = argparse.ArgumentParser(description='Discover all devices, policies starting from a single server (MDS or stand-alone) from Check Point R8x management via API calls')
-parser.add_argument('-a', '--hostname', metavar='api_host', required=True, help='Check Point R8x management server')
-parser.add_argument('-w', '--password_file', metavar='api_password_file', required=True, help='name of file containing the password for API of the management server')
-parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch')
-parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443')
-parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off')
-parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0')
-parser.add_argument('-V', '--version', metavar='api_version', default='off', help='alternate API version [off|]; default=off')
-parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment')
-parser.add_argument('-f', '--format', metavar='output_format', default='table', help='[json|table]]')
-
-args = parser.parse_args()
-if len(sys.argv)==1:
- parser.print_help(sys.stderr)
- sys.exit(1)
-
-offset = 0
-use_object_dictionary = 'false'
-base_url = 'https://' + args.hostname + ':' + args.port + '/web_api/'
-ssl_verification = fwo_log.set_ssl_verification(args.ssl, debug_level=args.debug)
-
-with open(args.password_file, 'r') as file:
- apiuser_pwd = file.read().replace('\n', '')
-
-xsid = getter.login(args.user, apiuser_pwd, args.hostname, args.port, args.domain, ssl_verification=ssl_verification, debug=args.debug)
-
-api_versions = getter.cp_api_call(base_url, 'show-api-versions', {}, xsid, ssl_verification=ssl_verification)
-api_version = api_versions["current-version"]
-api_supported = api_versions["supported-versions"]
-v_url = getter.set_api_url(base_url,args.version,api_supported,args.hostname)
-
-v_url = 'https://' + args.hostname + ':' + args.port + '/web_api/'
-if args.version != "off":
- v_url += 'v' + args.version + '/'
-
-logger = logging.getLogger(__name__)
-
-xsid = getter.login(args.user, apiuser_pwd, args.hostname, args.port, '', ssl_verification=ssl_verification)
-
-if args.debug == "1" or args.debug == "3":
- debug = True
-else:
- debug = False
-
-# todo: only show active devices (optionally with a switch)
-domains = getter.cp_api_call (v_url, 'show-domains', {}, xsid, ssl_verification=ssl_verification)
-gw_types = ['simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiGatewayPlain', 'CpmiGatewayCluster', 'CpmiVsxClusterNetobj']
-parameters = { "details-level" : "full" }
-
-if domains['total']== 0:
- logging.debug ("no domains found, adding dummy domain.")
- domains['objects'].append ({ "name": "", "uid": "" })
-
- # fetching gateways for non-MDS management:
- obj = domains['objects'][0]
- obj['gateways'] = getter.cp_api_call(v_url, 'show-gateways-and-servers', parameters, xsid, ssl_verification=ssl_verification)
-
- if 'objects' in obj['gateways']:
- for gw in obj['gateways']['objects']:
- if 'type' in gw and gw['type'] in gw_types and 'policy' in gw:
- if 'access-policy-installed' in gw['policy'] and gw['policy']['access-policy-installed'] and "access-policy-name" in gw['policy']:
- logging.debug ("standalone mgmt: found gateway " + gw['name'] + " with policy" + gw['policy']['access-policy-name'])
- gw['package'] = getter.cp_api_call(v_url,
- "show-package",
- { "name" : gw['policy']['access-policy-name'], "details-level": "full" },
- xsid, ssl_verification)
- else:
- logging.warning ("Standalone WARNING: did not find any gateways in stand-alone management")
- logout_result = getter.cp_api_call(v_url, 'logout', {}, xsid, ssl_verification=ssl_verification)
-
-else: # visit each domain and fetch layers
- for obj in domains['objects']:
- domain_name = obj['name']
- logging.debug ("MDS: searchig in domain " + domain_name)
- xsid = getter.login(args.user, apiuser_pwd, args.hostname, args.port, domain_name, ssl_verification=ssl_verification)
- obj['gateways'] = getter.cp_api_call(v_url, 'show-gateways-and-servers', parameters, xsid, ssl_verification)
- if 'objects' in obj['gateways']:
- for gw in obj['gateways']['objects']:
- if 'type' in gw and gw['type'] in gw_types and 'policy' in gw:
- if 'access-policy-installed' in gw['policy'] and gw['policy']['access-policy-installed'] and "access-policy-name" in gw['policy']:
- api_call_str = "show-package name " + gw['policy']['access-policy-name'] + ", logged in to domain " + domain_name
- logging.debug ("MDS: found gateway " + gw['name'] + " with policy: " + gw['policy']['access-policy-name'])
- logging.debug ("api call: " + api_call_str)
- try:
- tmp_pkg_name = getter.cp_api_call(v_url, 'show-package', { "name" : gw['policy']['access-policy-name'], "details-level": "full" },
- xsid, ssl_verification=ssl_verification)
- except:
- tmp_pkg_name = "ERROR while trying to get package " + gw['policy']['access-policy-name']
- gw['package'] = tmp_pkg_name
- else:
- logging.warning ("Domain-WARNING: did not find any gateways in domain " + obj['name'])
- logout_result = getter.cp_api_call(v_url, 'logout', {}, xsid, ssl_verification=ssl_verification)
-
-# now collect only relevant data and copy to new dict
-domains_essential = []
-for obj in domains['objects']:
- domain = { 'name': obj['name'], 'uid': obj['uid'] }
- gateways = []
- domain['gateways'] = gateways
- if 'objects' in obj['gateways']:
- for gw in obj['gateways']['objects']:
- if 'policy' in gw and 'access-policy-name' in gw['policy']:
- gateway = { "name": gw['name'], "uid": gw['uid'], "access-policy-name": gw['policy']['access-policy-name'] }
- layers = []
- if 'package' in gw:
- if 'access-layers' in gw['package']:
- found_domain_layer = False
- for ly in gw['package']['access-layers']:
- if 'firewall' in ly and ly['firewall']:
- if 'parent-layer' in ly:
- found_domain_layer = True
- for ly in gw['package']['access-layers']:
- if 'firewall' in ly and ly['firewall']:
- if 'parent-layer' in ly:
- layer = { "name": ly['name'], "uid": ly['uid'], "type": "domain-layer", "parent-layer": ly['parent-layer'] }
- elif domains['total']==0:
- layer = { "name": ly['name'], "uid": ly['uid'], "type": "local-layer" }
- elif found_domain_layer:
- layer = { "name": ly['name'], "uid": ly['uid'], "type": "global-layer" }
- else: # in domain context, but no global layer exists
- layer = { "name": ly['name'], "uid": ly['uid'], "type": "stand-alone-layer" }
- layers.append(layer)
- gateway['layers'] = layers
- gateways.append(gateway)
- domain['gateways'] = gateways
- domains_essential.append(domain)
-devices = {"domains": domains_essential }
-
-
-##### output ########
-if args.format == 'json':
- print (json.dumps(devices, indent=3))
-
-elif args.format == 'table':
- # compact print in FWO UI input format
- colsize_number = 35
- colsize = "{:"+str(colsize_number)+"}"
- table = ""
- heading_list = ["Domain/Management", "Gateway", "Policy String"]
-
- # add table header:
- for heading in heading_list:
- table += colsize.format(heading)
- table += "\n"
- x = 0
- while x < len(heading_list) * colsize_number:
- table += '-'
- x += 1
- table += "\n"
-
- # print one gateway/policy per line
- for dom in devices['domains']:
- if 'gateways' in dom:
- for gw in dom['gateways']:
- table += colsize.format(dom["name"])
- table += colsize.format(gw['name'])
- if 'layers' in gw:
- found_domain_layer = False
- layer_string = ''
- for ly in gw['layers']:
- if 'parent-layer' in ly:
- found_domain_layer = True
- for ly in gw['layers']:
- if ly['type'] == 'stand-alone-layer' or ly['type'] == 'local-layer':
- layer_string = ly["name"]
- elif found_domain_layer and ly['type'] == 'domain-layer':
- domain_layer = ly['name']
- elif found_domain_layer and ly['type'] == 'global-layer':
- global_layer = ly['name']
- else:
- logging.warning ("found unknown layer type")
- if found_domain_layer:
- layer_string = global_layer + '/' + domain_layer
- table += colsize.format(layer_string)
- table += "\n"
- else:
- table += colsize.format(dom["name"])
- table += "\n" # empty line between domains for readability
-
- print (table)
-
-else:
- logging.error("You specified a wrong output format: " + args.format )
- parser.print_help(sys.stderr)
- sys.exit(1)
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_enrich_config.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_enrich_config.py
deleted file mode 100755
index 2db73c357..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_enrich_config.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python3
-import argparse, time
-import json
-import sys, os
-import cp_const
-
-from common import importer_base_dir, set_ssl_verification
-sys.path.append(importer_base_dir)
-sys.path.append(importer_base_dir + "/checkpointR8x")
-from fwo_log import getFwoLogger
-from cpcommon import enrich_config
-
-
-parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls')
-parser.add_argument('-a', '--apihost', metavar='api_host', required=True, help='Check Point R8x management server')
-parser.add_argument('-w', '--password', metavar='api_password_file', default='import_user_secret', help='name of the file to read the password for management server from')
-parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch')
-parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443')
-parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Envireonment')
-parser.add_argument('-l', '--layer', metavar='policy_layer_name(s)', required=True, help='name of policy layer(s) to read (comma separated)')
-parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off')
-parser.add_argument('-i', '--limit', metavar='api_limit', default='150', help='The maximal number of returned results per HTTPS Connection; default=150')
-parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0')
-parser.add_argument('-k', '--package', metavar='package_name', help='name of the package for a gateway - necessary for getting NAT rules')
-parser.add_argument('-c', '--configfile', metavar='config_file', required=True, help='filename to read and write config in json format from/to')
-parser.add_argument('-n', '--noapi', metavar='mode', default='false', help='if set to true (only in combination with mode=enrich), no api connections are made. Useful for testing only.')
-
-args = parser.parse_args()
-if len(sys.argv)==1:
- parser.print_help(sys.stderr)
- sys.exit(1)
-
-with open(args.password, "r") as password_file:
- api_password = password_file.read().rstrip()
-
-debug_level = int(args.debug)
-logger = getFwoLogger()
-config = {}
-starttime = int(time.time())
-
-# possible todo: get mgmt_details via API just from mgmt_name and dev_name?
-mgm_details = {
- 'hostname': args.apihost,
- 'port': args.port,
- 'user': args.user,
- 'secret': api_password,
- 'configPath': args.domain,
- 'devices': [
- {
- 'local_rulebase_name': args.layer,
- 'global_rulebase_name': None,
- 'package_name': args.package
- }
- ]
-}
-
-result = enrich_config (config, mgm_details, noapi=False, limit=args.limit, details_level=cp_const.details_level)
-
-duration = int(time.time()) - starttime
-logger.debug ( "checkpointR8x/enrich_config - duration: " + str(duration) + "s" )
-
-# dump new json file if config_filename is set
-if args.config_filename != None and len(args.config_filename)>1:
- if os.path.exists(args.config_filename): # delete json file (to enabiling re-write)
- os.remove(args.config_filename)
- with open(args.config_filename, "w") as json_data:
- json_data.write(json.dumps(config))
-
-sys.exit(0)
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_get_basic_config.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_get_basic_config.py
deleted file mode 100755
index c6a95f560..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_get_basic_config.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/python3
-
-import time, sys
-import argparse
-from fwo_const import importer_base_dir
-sys.path.append(importer_base_dir)
-from fwo_log import getFwoLogger
-from cp_const import details_level
-from cpcommon import get_basic_config
-
-
-parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls')
-parser.add_argument('-a', '--apihost', metavar='api_host', required=True, help='Check Point R8x management server')
-parser.add_argument('-w', '--password', metavar='api_password_file', default='import_user_secret', help='name of the file to read the password for management server from')
-parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch')
-parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443')
-parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Envireonment')
-parser.add_argument('-l', '--layer', metavar='policy_layer_name(s)', required=True, help='name of policy layer(s) to read (comma separated)')
-parser.add_argument('-k', '--package', metavar='policy package name', required=False, help='name of policy package (needed for nat rule retrieval)')
-parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off"; default=empty/off')
-parser.add_argument('-i', '--limit', metavar='api_limit', default='150', help='The maximal number of returned results per HTTPS Connection; default=150')
-parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0')
-parser.add_argument('-t', '--testing', metavar='version_testing', default='off', help='Version test, [off|]; default=off')
-parser.add_argument('-o', '--out', metavar='output_file', required=True, help='filename to write output in json format to')
-parser.add_argument('-F', '--force', action='store_true', default=False, help='if set the import will be attempted without checking for changes before')
-
-args = parser.parse_args()
-if len(sys.argv)==1:
- parser.print_help(sys.stderr)
- sys.exit(1)
-
-with open(args.password, "r") as password_file:
- api_password = password_file.read().rstrip()
-
-debug_level = int(args.debug)
-logger = getFwoLogger()
-starttime = int(time.time())
-full_config_json = {}
-
-# possible todo: get mgmt_details via API just from mgmt_name and dev_name?
-# todo: allow for multiple gateways
-mgm_details = {
- 'hostname': args.apihost,
- 'port': args.port,
- 'user': args.user,
- 'secret': api_password,
- 'configPath': args.domain,
- 'devices': [
- {
- 'local_rulebase_name': args.layer,
- 'global_rulebase_name': None,
- 'package_name': args.package
- }
- ]
-}
-
-get_basic_config (full_config_json, mgm_details, config_filename=args.out,
- force=args.force, limit=args.limit, details_level=details_level, test_version=args.testing, debug_level=debug_level, ssl_verification=set_ssl_verification(args.ssl, debug_level=debug_level))
-
-duration = int(time.time()) - starttime
-logger.debug ( "checkpointR8x/get_config - duration: " + str(duration) + "s" )
-
-sys.exit(0)
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_config.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_config.py
deleted file mode 100755
index 6b207f86f..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_config.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/python3
-import sys
-from common import importer_base_dir
-sys.path.append(importer_base_dir)
-import parse_network, parse_service, parse_user # parse_rule,
-import parse_network_csv, parse_rule_csv, parse_service_csv, parse_user_csv
-import argparse
-import json
-import sys
-import fwo_log
-
-
-parser = argparse.ArgumentParser(description='parse json configuration file from Check Point R8x management')
-parser.add_argument('-f', '--config_file', required=True, help='name of config file to parse (json format)')
-parser.add_argument('-i', '--import_id', default='0', help='unique import id')
-parser.add_argument('-m', '--management_name', default='', help='name of management system to import')
-parser.add_argument('-r', '--rulebase', default='', help='name of rulebase to import')
-parser.add_argument('-n', '--network_objects', action="store_true", help='import network objects')
-parser.add_argument('-s', '--service_objects', action="store_true", help='import service objects')
-parser.add_argument('-u', '--users', action="store_true", help='import users')
-parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 1(DEBUG Console) 2(DEBUG File)i 2(DEBUG Console&File); default=0')
-args = parser.parse_args()
-
-found_rulebase = False
-number_of_section_headers_so_far = 0
-rule_num = 0
-nw_objects = []
-svc_objects = []
-section_header_uids=[]
-result = ""
-
-# log config
-debug_level = int(args.debug)
-logger = fwo_log.getFwoLogger()
-
-args = parser.parse_args()
-if len(sys.argv)==1:
- parser.print_help(sys.stderr)
- sys.exit(1)
-
-config_filename = args.config_file
-
-with open(args.config_file, "r") as json_data:
- config = json.load(json_data)
-
-logger.debug ("parse_config - args"+ "\nf:" +args.config_file +"\ni: "+ args.import_id +"\nm: "+ args.management_name +"\nr: "+ args.rulebase +"\nn: "+ str(args.network_objects) +"\ns: "+ str(args.service_objects) +"\nu: "+ str(args.users) +"\nd: "+ str(args.debug))
-
-if args.rulebase != '':
- for rulebase in config['rulebases']:
- current_layer_name = rulebase['layername']
- if current_layer_name == args.rulebase:
- logger.debug("parse_config: found layer to parse: " + current_layer_name)
- found_rulebase = True
- rule_num, result = parse_rule_csv.csv_dump_rules(rulebase, args.rulebase, args.import_id, rule_num=0, section_header_uids=[], parent_uid="", debug_level=debug_level)
-
-if args.network_objects:
- result = ''
- nw_objects = []
-
- if args.network_objects != '':
- for obj_table in config['object_tables']:
- parse_network.collect_nw_objects(obj_table, nw_objects, debug_level=debug_level)
- for idx in range(0, len(nw_objects)-1):
- if nw_objects[idx]['obj_typ'] == 'group':
- parse_network.add_member_names_for_nw_group(idx, nw_objects)
-
- for nw_obj in nw_objects:
- result += parse_network_csv.csv_dump_nw_obj(nw_obj, args.import_id)
-
-if args.service_objects:
- result = ''
- service_objects = []
- if args.service_objects != '':
- for obj_table in config['object_tables']:
- parse_service.collect_svc_objects(obj_table, service_objects)
- # resolving group members:
- for idx in range(0, len(service_objects)-1):
- if service_objects[idx]['svc_typ'] == 'group':
- parse_service.add_member_names_for_svc_group(idx, service_objects)
-
- for svc_obj in service_objects:
- result += parse_service_csv.csv_dump_svc_obj(svc_obj, args.import_id)
-
-if args.users:
- users = {}
- result = ''
- for rulebase in config['rulebases']:
- parse_user.collect_users_from_rulebase(rulebase, users)
-
- for user_name in users.keys():
- user_dict = users[user_name]
- result += parse_user_csv.csv_dump_user(user_name, user_dict, args.import_id)
-
-if args.rulebase != '' and not found_rulebase:
- logger.exception("PARSE ERROR: rulebase '" + args.rulebase + "' not found.")
-else:
- result = result[:-1] # strip off final line break to avoid empty last line
- print(result)
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_network_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_network_csv.py
deleted file mode 100644
index c1e43faf8..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_network_csv.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from fwo_base import csv_add_field
-from fwo_const import csv_delimiter, line_delimiter
-
-
-def csv_dump_nw_obj(nw_obj, import_id):
- result_line = csv_add_field(import_id) # control_id
- result_line += csv_add_field(nw_obj['obj_name']) # obj_name
- result_line += csv_add_field(nw_obj['obj_typ']) # ob_typ
- if nw_obj['obj_member_names'] != None:
- result_line += csv_add_field(nw_obj['obj_member_names']) # obj_member_names
- else:
- result_line += csv_delimiter # no obj_member_names
- if nw_obj['obj_member_refs'] != None:
- result_line += csv_add_field(nw_obj['obj_member_refs']) # obj_member_refs
- else:
- result_line += csv_delimiter # no obj_member_refs
- result_line += csv_delimiter # obj_sw
- if nw_obj['obj_typ'] == 'group':
- result_line += csv_delimiter # obj_ip for groups = null
- result_line += csv_delimiter # obj_ip_end for groups = null
- else:
- result_line += csv_add_field(nw_obj['obj_ip']) # obj_ip
- if 'obj_ip_end' in nw_obj:
- result_line += csv_add_field(nw_obj['obj_ip_end'])# obj_ip_end
- else:
- result_line += csv_delimiter
- result_line += csv_add_field(nw_obj['obj_color']) # obj_color
- if nw_obj['obj_comment'] != None:
- result_line += csv_add_field(nw_obj['obj_comment']) # obj_comment
- else:
- result_line += csv_delimiter # no obj_comment
- result_line += csv_delimiter # obj_location
- if 'obj_zone' in nw_obj:
- result_line += csv_add_field(nw_obj['obj_zone']) # obj_zone
- else:
- result_line += csv_delimiter
- result_line += csv_add_field(nw_obj['obj_uid']) # obj_uid
- result_line += csv_delimiter # last_change_admin
- # add last_change_time
- result_line += line_delimiter
- return result_line
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_rule_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_rule_csv.py
deleted file mode 100644
index 12f633b41..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_rule_csv.py
+++ /dev/null
@@ -1,224 +0,0 @@
-from fwo_log import getFwoLogger
-import json
-import cp_const, cpcommon, parse_rule, fwo_const
-from fwo_const import list_delimiter, csv_delimiter, line_delimiter
-from fwo_base import csv_add_field
-from fwo_exception import ImportRecursionLimitReached
-
-
-def create_section_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid):
- # only do this once! : section_header_uids.append(rule_uid)
- header_rule_csv = csv_add_field(import_id) # control_id
- header_rule_csv += csv_add_field(str(rule_num)) # rule_num
- header_rule_csv += csv_add_field(layer_name) # rulebase_name
- header_rule_csv += csv_delimiter # rule_ruleid
- header_rule_csv += csv_add_field('False') # rule_disabled
- header_rule_csv += csv_add_field('False') # rule_src_neg
- header_rule_csv += csv_add_field('Any') # rule_src
- header_rule_csv += csv_add_field(cp_const.any_obj_uid) # rule_src_refs
- header_rule_csv += csv_add_field('False') # rule_dst_neg
- header_rule_csv += csv_add_field('Any') # rule_dst
- header_rule_csv += csv_add_field(cp_const.any_obj_uid) # rule_dst_refs
- header_rule_csv += csv_add_field('False') # rule_svc_neg
- header_rule_csv += csv_add_field('Any') # rule_svc
- header_rule_csv += csv_add_field(cp_const.any_obj_uid) # rule_svc_refs
- header_rule_csv += csv_add_field('Accept') # rule_action
- header_rule_csv += csv_add_field('Log') # rule_track
- header_rule_csv += csv_add_field('Policy Targets') # rule_installon
- header_rule_csv += csv_add_field('Any') # rule_time
- header_rule_csv += csv_delimiter # rule_comment
- header_rule_csv += csv_delimiter # rule_name
- header_rule_csv += csv_add_field(rule_uid) # rule_uid
- header_rule_csv += csv_add_field(section_name) # rule_head_text
- header_rule_csv += csv_delimiter # rule_from_zone
- header_rule_csv += csv_delimiter # rule_to_zone
- header_rule_csv += csv_delimiter # rule_last_change_admin
- if parent_uid != "":
- header_rule_csv += csv_add_field(parent_uid, no_csv_delimiter=True) # parent_rule_uid
- return header_rule_csv + line_delimiter
-
-
-def create_domain_rule_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid):
- return create_section_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid)
-
-
-def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid, debug_level=0):
- logger = getFwoLogger()
- rule_csv = ''
-
- # reference to domain rule layer, filling up basic fields
- if 'type' in rule and rule['type'] != 'place-holder':
-# add_missing_info_to_domain_ref_rule(rule)
- if 'rule-number' in rule: # standard rule, no section header
- # print ("rule #" + str(rule['rule-number']) + "\n")
- rule_csv += csv_add_field(import_id) # control_id
- rule_csv += csv_add_field(str(rule_num)) # rule_num
- rule_csv += csv_add_field(layer_name) # rulebase_name
- rule_csv += csv_add_field('') # rule_ruleid is empty
- rule_csv += csv_add_field(str(not rule['enabled'])) # rule_disabled
- rule_csv += csv_add_field(str(rule['source-negate'])) # src_neg
-
- # SOURCE names
- rule_src_name = ''
- for src in rule["source"]:
- if src['type'] == 'LegacyUserAtLocation':
- rule_src_name += src['name'] + list_delimiter
- elif src['type'] == 'access-role':
- if isinstance(src['networks'], str): # just a single source
- if src['networks'] == 'any':
- rule_src_name += src["name"] + '@' + 'Any' + list_delimiter
- else:
- rule_src_name += src["name"] + '@' + src['networks'] + list_delimiter
- else: # more than one source
- for nw in src['networks']:
- rule_src_name += src[
- # TODO: this is not correct --> need to reverse resolve name from given UID
- "name"] + '@' + nw + list_delimiter
- else: # standard network objects as source
- rule_src_name += src["name"] + list_delimiter
- rule_src_name = rule_src_name[:-1] # removing last list_delimiter
- rule_csv += csv_add_field(rule_src_name) # src_names
-
- # SOURCE refs
- rule_src_ref = ''
- for src in rule["source"]:
- if src['type'] == 'LegacyUserAtLocation':
- rule_src_ref += src["userGroup"] + '@' + src["location"] + list_delimiter
- elif src['type'] == 'access-role':
- if isinstance(src['networks'], str): # just a single source
- if src['networks'] == 'any':
- rule_src_ref += src['uid'] + '@' + cp_const.any_obj_uid + list_delimiter
- else:
- rule_src_ref += src['uid'] + '@' + src['networks'] + list_delimiter
- else: # more than one source
- for nw in src['networks']:
- rule_src_ref += src['uid'] + '@' + nw + list_delimiter
- else: # standard network objects as source
- rule_src_ref += src["uid"] + list_delimiter
- rule_src_ref = rule_src_ref[:-1] # removing last list_delimiter
- rule_csv += csv_add_field(rule_src_ref) # src_refs
-
- rule_csv += csv_add_field(str(rule['destination-negate'])) # destination negation
-
- rule_dst_name = ''
- for dst in rule["destination"]:
- rule_dst_name += dst["name"] + list_delimiter
- rule_dst_name = rule_dst_name[:-1]
- rule_csv += csv_add_field(rule_dst_name) # rule dest_name
-
- rule_dst_ref = ''
- for dst in rule["destination"]:
- rule_dst_ref += dst["uid"] + list_delimiter
- rule_dst_ref = rule_dst_ref[:-1]
- rule_csv += csv_add_field(rule_dst_ref) # rule_dest_refs
-
- # SERVICE negate
- rule_csv += csv_add_field(str(rule['service-negate'])) # service negation
- # SERVICE names
- rule_svc_name = ''
- for svc in rule["service"]:
- rule_svc_name += svc["name"] + list_delimiter
- rule_svc_name = rule_svc_name[:-1]
- rule_csv += csv_add_field(rule_svc_name) # rule svc name
-
- # SERVICE refs
- rule_svc_ref = ''
- for svc in rule["service"]:
- rule_svc_ref += svc["uid"] + list_delimiter
- rule_svc_ref = rule_svc_ref[:-1]
- rule_csv += csv_add_field(rule_svc_ref) # rule svc ref
-
- rule_action = rule['action']
- rule_action_name = rule_action['name']
- rule_csv += csv_add_field(rule_action_name) # rule action
- rule_track = rule['track']
- rule_track_type = rule_track['type']
- rule_csv += csv_add_field(rule_track_type['name']) # rule track
-
- rule_install_on = rule['install-on']
- first_rule_install_target = rule_install_on[0]
- rule_csv += csv_add_field(first_rule_install_target['name']) # install on
-
- rule_time = rule['time']
- first_rule_time = rule_time[0]
- rule_csv += csv_add_field(first_rule_time['name']) # time
- if (rule['comments']!=None and rule['comments']!=''):
- rule_csv += csv_add_field(rule['comments']) # comments
- else:
- rule_csv += csv_delimiter # no comments
- if 'name' in rule:
- rule_name = rule['name']
- else:
- rule_name = None
- rule_csv += csv_add_field(rule_name) # rule_name
-
- rule_csv += csv_add_field(rule['uid']) # rule_uid
- rule_head_text = ''
- rule_csv += csv_add_field(rule_head_text) # rule_head_text
- rule_from_zone = ''
- rule_csv += csv_add_field(rule_from_zone)
- rule_to_zone = ''
- rule_csv += csv_add_field(rule_to_zone)
- rule_meta_info = rule['meta-info']
- rule_csv += csv_add_field(rule_meta_info['last-modifier'])
- # new in v5.1.17:
- if 'parent_rule_uid' in rule:
- logger.debug('found rule (uid=' + rule['uid'] + ') with parent_rule_uid set: ' + rule['parent_rule_uid'])
- parent_rule_uid = rule['parent_rule_uid']
- else:
- parent_rule_uid = parent_uid
- if (parent_rule_uid!=''):
- rule_csv += csv_add_field(parent_rule_uid,no_csv_delimiter=True)
- rule_csv += line_delimiter
- return rule_csv
-
-
-def csv_dump_rules(rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid, debug_level=0, recursion_level=1):
- logger = getFwoLogger()
- result = ''
-
- if recursion_level>fwo_const.max_recursion_level:
- raise ImportRecursionLimitReached("csv_dump_rules") from None
-
- if 'layerchunks' in rulebase:
- for chunk in rulebase['layerchunks']:
- if 'rulebase' in chunk:
- for rules_chunk in chunk['rulebase']:
- rule_num, rules_in_csv = csv_dump_rules(rules_chunk, layer_name, import_id, rule_num, section_header_uids, parent_uid, debug_level=debug_level, recursion_level=recursion_level+1)
- result += rules_in_csv
- else:
- logger.warning("found no rulebase in chunk:\n" + json.dumps(chunk, indent=2))
- else:
- if 'rulebase' in rulebase:
- if rulebase['type'] == 'access-section' and not rulebase['uid'] in section_header_uids: # add section header, but only if it does not exist yet (can happen by chunking a section)
- section_name = "section without name"
- if 'name' in rulebase:
- section_name = rulebase['name']
- if 'parent_rule_uid' in rulebase:
- parent_uid = rulebase['parent_rule_uid']
- else:
- parent_uid = ""
- section_header = create_section_header(section_name, layer_name, import_id, rulebase['uid'], rule_num, section_header_uids, parent_uid)
- rule_num += 1
- result += section_header
- parent_uid = rulebase['uid']
- for rule in rulebase['rulebase']:
- if rule['type'] == 'place-holder': # add domain rules
- section_name = ""
- if 'name' in rulebase:
- section_name = rule['name']
- result += parse_rule.create_domain_rule_header(section_name, layer_name, import_id, rule['uid'], rule_num, section_header_uids, parent_uid)
- else: # parse standard sections
- rule_num, rules_in_layer = csv_dump_rules(rule, layer_name, import_id, rule_num, section_header_uids, parent_uid, debug_level=debug_level)
- result += rules_in_layer
- if rulebase['type'] == 'place-holder': # add domain rules
- logger.debug('found domain rule ref: ' + rulebase['uid'])
- section_name = ""
- if 'name' in rulebase:
- section_name = rulebase['name']
- result += parse_rule.create_domain_rule_header(section_name, layer_name, import_id, rulebase['uid'], rule_num, section_header_uids, parent_uid)
- rule_num += 1
- if 'rule-number' in rulebase:
- result += csv_dump_rule(rulebase, layer_name, import_id, rule_num, parent_uid, debug_level=debug_level)
- rule_num += 1
- return rule_num, result
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_service_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_service_csv.py
deleted file mode 100644
index 9e01b6b4e..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_service_csv.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from fwo_base import csv_add_field
-from fwo_const import csv_delimiter, line_delimiter
-
-
-def csv_dump_svc_obj(svc_obj, import_id):
- result_line = csv_add_field(import_id) # control_id
- result_line += csv_add_field(svc_obj['svc_name']) # svc_name
- result_line += csv_add_field(svc_obj['svc_typ']) # svc_typ
- result_line += csv_delimiter # no svc_prod_specific
- if svc_obj['svc_member_names'] != None:
- result_line += csv_add_field(svc_obj['svc_member_names']) # svc_member_names
- else:
- result_line += csv_delimiter # no svc_member_names
- if svc_obj['svc_member_refs'] != None:
- result_line += csv_add_field(svc_obj['svc_member_refs']) # obj_member_refs
- else:
- result_line += csv_delimiter # no svc_member_refs
- result_line += csv_add_field(svc_obj['svc_color']) # svc_color
- result_line += csv_add_field(svc_obj['ip_proto']) # ip_proto
- if svc_obj['svc_port']!=None:
- result_line += str(svc_obj['svc_port']) + csv_delimiter # svc_port
- else:
- result_line += csv_delimiter # no svc_port
- if svc_obj['svc_port_end']!=None:
- result_line += str(svc_obj['svc_port_end']) + csv_delimiter # svc_port_end
- else:
- result_line += csv_delimiter # no svc_port_end
- if 'svc_source_port' in svc_obj:
- result_line += csv_add_field(svc_obj['svc_source_port']) # svc_source_port
- else:
- result_line += csv_delimiter # svc_source_port
- if 'svc_source_port_end' in svc_obj:
- result_line += csv_add_field(svc_obj['svc_source_port_end']) # svc_source_port_end
- else:
- result_line += csv_delimiter # svc_source_port_end
- if 'svc_comment' in svc_obj and svc_obj['svc_comment'] != None:
- result_line += csv_add_field(svc_obj['svc_comment']) # svc_comment
- else:
- result_line += csv_delimiter # no svc_comment
- if 'rpc_nr' in svc_obj and svc_obj['rpc_nr'] != None:
- result_line += csv_add_field(str(svc_obj['rpc_nr'])) # rpc_nr
- else:
- result_line += csv_delimiter # no rpc_nr
- if 'svc_timeout_std' in svc_obj:
- result_line += csv_add_field(svc_obj['svc_timeout_std']) # svc_timeout_std
- else:
- result_line += csv_delimiter # svc_timeout_std
- if 'svc_timeout' in svc_obj and svc_obj['svc_timeout']!="" and svc_obj['svc_timeout']!=None:
- result_line += csv_add_field(str(svc_obj['svc_timeout'])) # svc_timeout
- else:
- result_line += csv_delimiter # svc_timeout null
- result_line += csv_add_field(svc_obj['svc_uid']) # svc_uid
- result_line += csv_delimiter # last_change_admin
- result_line += line_delimiter # last_change_time
- return result_line
diff --git a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_user_csv.py b/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_user_csv.py
deleted file mode 100644
index 032540b52..000000000
--- a/roles/importer/files/importer/checkpointR8x/unused_cpr8x_parse_user_csv.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from fwo_base import csv_add_field
-from fwo_const import csv_delimiter, line_delimiter
-
-
-def csv_dump_user(user_name, user, import_id):
- user_line = csv_add_field(import_id) # control_id
- user_line += csv_add_field(user_name) # user_name
- user_line += csv_add_field(user['user_typ']) # user_typ
- if 'user_member_names' in user:
- user_line += csv_add_field(user['user_member_names']) # user_member_names
- else:
- user_line += csv_delimiter # no user_member_names
- if 'user_member_refs' in user:
- user_line += csv_add_field(user['user_member_refs']) # user_member_refs
- else:
- user_line += csv_delimiter # no user_member_refs
- if 'user_color' in user:
- user_line += csv_add_field(user['user_color']) # user_color
- else:
- user_line += csv_delimiter # no user_color
- if 'user_comment' in user and user['user_comment']!=None and user['user_comment']!='':
- user_line += csv_add_field(user['user_comment']) # user_comment
- else:
- user_line += csv_delimiter # no user_comment
- user_line += csv_add_field(user['user_uid']) # user_uid
- user_line += csv_delimiter # user_valid_until
- user_line += line_delimiter # last_change_admin
- return user_line
diff --git a/roles/importer/files/importer/common.py b/roles/importer/files/importer/common.py
index 9d8c51120..ba9ae768d 100644
--- a/roles/importer/files/importer/common.py
+++ b/roles/importer/files/importer/common.py
@@ -15,7 +15,7 @@
import jsonpickle
from fwo_exception import FwoApiLoginFailed, FwoApiFailedLockImport, ConfigFileNotFound, FwLoginFailed, ImportRecursionLimitReached
from fwo_base import split_config
-
+from fwo_mail import send_change_notification_mail
# import_management: import a single management (if no import for it is running)
# lock mgmt for import via FWORCH API call, generating new import_id y
@@ -120,6 +120,8 @@ def import_management(mgm_id=None, ssl_verification=None, debug_level_in=0,
config_changed_since_last_import, error_string, error_count, change_count = get_config_from_api(mgm_details, full_config_json, config2import, jwt, current_import_id, start_time,
in_file=in_file, import_tmp_path=import_tmp_path, error_string=error_string, error_count=error_count, change_count=change_count,
limit=limit, force=force)
+ if (debug_level>7): # dump full native config read from fw API
+ logger.info(json.dumps(full_config_json, indent=2))
time_get_config = int(time.time()) - start_time
logger.debug("import_management - getting config total duration " + str(time_get_config) + "s")
@@ -148,6 +150,11 @@ def import_management(mgm_id=None, ssl_verification=None, debug_level_in=0,
try: # get change count from db
change_count = fwo_api.count_changes_per_import(fwo_config['fwo_api_base_url'], jwt, current_import_id)
+ if change_count>0:
+ emailConfig = fwo_api.get_config_values(fwo_config['fwo_api_base_url'], jwt, keyFilter="email")
+ impChangeNotifyConfig = fwo_api.get_config_values(fwo_config['fwo_api_base_url'], jwt, keyFilter="impChangeNotify")
+ notificationConfig = dict(emailConfig, **impChangeNotifyConfig) # merge the two config dicts
+ send_change_notification_mail(notificationConfig, change_count, mgm_details['name'], mgm_id)
except:
logger.error("import_management - unspecified error while getting change count: " + str(traceback.format_exc()))
raise
@@ -170,7 +177,7 @@ def import_management(mgm_id=None, ssl_verification=None, debug_level_in=0,
else: # if no changes were found, we skip everything else without errors
pass
- if (debug_level>8):
+ if (debug_level>8): # dump normalized config for debugging purposes
logger.info(json.dumps(config2import, indent=2))
error_count = complete_import(current_import_id, error_string, start_time, mgm_details, change_count, error_count, jwt)
@@ -261,7 +268,8 @@ def complete_import(current_import_id, error_string, start_time, mgm_details, ch
logger = getFwoLogger()
fwo_config = readConfig(fwo_config_filename)
- fwo_api.log_import_attempt(fwo_config['fwo_api_base_url'], jwt, mgm_details['id'], successful=not error_count)
+ success = (error_count==0)
+ log_result = fwo_api.log_import_attempt(fwo_config['fwo_api_base_url'], jwt, mgm_details['id'], successful=success)
try: # CLEANUP: delete configs of imports (without changes) (if no error occured)
if fwo_api.delete_json_config_in_import_table(fwo_config['fwo_api_base_url'], jwt, {"importId": current_import_id})<0:
diff --git a/roles/importer/files/importer/fortiosmanagementREST/fOS_common.py b/roles/importer/files/importer/fortiosmanagementREST/fOS_common.py
new file mode 100644
index 000000000..154be9d41
--- /dev/null
+++ b/roles/importer/files/importer/fortiosmanagementREST/fOS_common.py
@@ -0,0 +1,34 @@
+import sys
+from common import importer_base_dir
+sys.path.append(importer_base_dir + '/fortiosmanagementREST')
+from curses import raw
+from fwo_log import getFwoLogger
+from fwo_const import list_delimiter, fwo_config_filename
+from fwo_config import readConfig
+from fwo_api import setAlert, create_data_issue
+
+
+# TODO: deal with objects with identical names (e.g. all ipv4 & all ipv6)
+def resolve_objects (obj_name_string_list, lookup_dict={}, delimiter=list_delimiter, jwt=None, import_id=None, mgm_id=None):
+ logger = getFwoLogger()
+ fwo_config = readConfig(fwo_config_filename)
+
+ ref_list = []
+ objects_not_found = []
+ for el in obj_name_string_list.split(delimiter):
+ found = False
+ if el in lookup_dict:
+ ref_list.append(lookup_dict[el])
+ else:
+ objects_not_found.append(el)
+
+ for obj in objects_not_found:
+ if obj != 'all' and obj != 'Original':
+ if not create_data_issue(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, obj_name=obj, severity=1, mgm_id=mgm_id):
+ logger.warning("resolve_raw_objects: encountered error while trying to log an import data issue using create_data_issue")
+
+ desc = "found a broken object reference '" + obj + "' "
+ setAlert(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, title="object reference error", mgm_id=mgm_id, severity=1, role='importer', \
+ description=desc, source='import', alertCode=16)
+
+ return delimiter.join(ref_list)
diff --git a/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py b/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py
index 2ade05e2e..de3a8f2a1 100644
--- a/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py
+++ b/roles/importer/files/importer/fortiosmanagementREST/fOS_network.py
@@ -15,6 +15,7 @@ def normalize_nwobjects(full_config, config2import, import_id, nw_obj_types, jwt
for obj_orig in full_config[obj_type]:
obj_zone = 'global'
obj = {}
+ ipa = ""
obj.update({'obj_name': obj_orig['name']})
if 'subnet' in obj_orig: # ipv4 object
if isinstance(obj_orig['subnet'], str) and ' ' in obj_orig['subnet']:
@@ -57,9 +58,7 @@ def normalize_nwobjects(full_config, config2import, import_id, nw_obj_types, jwt
if 'extip' not in obj_orig or len(obj_orig['extip'])==0:
logger.error("vip (extip): found empty extip field for " + obj_orig['name'])
else:
- if len(obj_orig['extip'])>1:
- logger.warning("vip (extip): found more than one extip, just using the first one for " + obj_orig['name'])
- set_ip_in_obj(obj, obj_orig['extip'][0]) # resolving nat range if there is one
+ set_ip_in_obj(obj, obj_orig['extip']) # resolving nat range if there is one
nat_obj = {}
nat_obj.update({'obj_typ': 'host' })
nat_obj.update({'obj_color': 'black'})
@@ -73,14 +72,14 @@ def normalize_nwobjects(full_config, config2import, import_id, nw_obj_types, jwt
else:
if len(obj_orig['mappedip'])>1:
logger.warning("vip (extip): found more than one mappedip, just using the first one for " + obj_orig['name'])
- nat_ip = obj_orig['mappedip'][0]
+ nat_ip = obj_orig['mappedip'][0]['range']
set_ip_in_obj(nat_obj, nat_ip)
obj.update({ 'obj_nat_ip': nat_obj['obj_ip'] }) # save nat ip in vip obj
if 'obj_ip_end' in nat_obj: # this nat obj is a range - include the end ip in name and uid as well to avoid akey conflicts
obj.update({ 'obj_nat_ip_end': nat_obj['obj_ip_end'] }) # save nat ip in vip obj
nat_obj.update({'obj_name': nat_obj['obj_ip'] + '-' + nat_obj['obj_ip_end'] + nat_postfix})
else:
- nat_obj.update({'obj_name': nat_obj['obj_ip'] + nat_postfix})
+ nat_obj.update({'obj_name': str(nat_obj['obj_ip']) + nat_postfix})
nat_obj.update({'obj_uid': nat_obj['obj_name']})
###### range handling
diff --git a/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py b/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py
index eeace4080..019f0d590 100644
--- a/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py
+++ b/roles/importer/files/importer/fortiosmanagementREST/fOS_rule.py
@@ -9,7 +9,7 @@
from fwo_log import getFwoLogger
from fwo_data_networking import get_matching_route_obj, get_ip_of_interface_obj
import ipaddress
-from fwcommon import resolve_objects
+from fOS_common import resolve_objects
import time
diff --git a/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py b/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py
index 5b16cf669..86415b0ac 100644
--- a/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py
+++ b/roles/importer/files/importer/fortiosmanagementREST/fwcommon.py
@@ -69,30 +69,30 @@ def get_config(config2import, full_config, current_import_id, mgm_details, limit
fOS_rule.getAccessPolicy(sid, fm_api_url, full_config, limit)
# fOS_rule.getNatPolicy(sid, fm_api_url, full_config, limit)
- # now we normalize relevant parts of the raw config and write the results to config2import dict
- # currently reading zone from objects for backward compat with FortiManager 6.x
- # fmgr_zone.normalize_zones(full_config, config2import, current_import_id)
-
- # write normalized networking data to config2import
- # this is currently not written to the database but only used for natting decisions
- # later we will probably store the networking info in the database as well as a basis
- # for path analysis
-
- # normalize_network_data(full_config, config2import, mgm_details)
-
- fOS_user.normalize_users(
- full_config, config2import, current_import_id, user_scope)
- fOS_network.normalize_nwobjects(
- full_config, config2import, current_import_id, nw_obj_scope, jwt=jwt, mgm_id=mgm_details['id'])
- fOS_service.normalize_svcobjects(
- full_config, config2import, current_import_id, svc_obj_scope)
- fOS_user.normalize_users(
- full_config, config2import, current_import_id, user_scope)
- fOS_rule.normalize_access_rules(
- full_config, config2import, current_import_id, mgm_details=mgm_details, jwt=jwt)
- # fOS_rule.normalize_nat_rules(
- # full_config, config2import, current_import_id, jwt=jwt)
- # fOS_network.remove_nat_ip_entries(config2import)
+ # now we normalize relevant parts of the raw config and write the results to config2import dict
+ # currently reading zone from objects for backward compat with FortiManager 6.x
+ # fmgr_zone.normalize_zones(full_config, config2import, current_import_id)
+
+ # write normalized networking data to config2import
+ # this is currently not written to the database but only used for natting decisions
+ # later we will probably store the networking info in the database as well as a basis
+ # for path analysis
+
+ # normalize_network_data(full_config, config2import, mgm_details)
+
+ fOS_user.normalize_users(
+ full_config, config2import, current_import_id, user_scope)
+ fOS_network.normalize_nwobjects(
+ full_config, config2import, current_import_id, nw_obj_scope, jwt=jwt, mgm_id=mgm_details['id'])
+ fOS_service.normalize_svcobjects(
+ full_config, config2import, current_import_id, svc_obj_scope)
+ fOS_zone.add_zone_if_missing (config2import, 'global', current_import_id)
+
+ fOS_rule.normalize_access_rules(
+ full_config, config2import, current_import_id, mgm_details=mgm_details, jwt=jwt)
+ # fOS_rule.normalize_nat_rules(
+ # full_config, config2import, current_import_id, jwt=jwt)
+ # fOS_network.remove_nat_ip_entries(config2import)
return 0
@@ -112,28 +112,3 @@ def getObjects(sid, fm_api_url, raw_config, limit, nw_obj_types, svc_obj_types):
fOS_getter.update_config_with_fortiOS_api_call(
raw_config, fm_api_url + "/cmdb/" + object_type + "?access_token=" + sid, "user_obj_" + object_type, limit=limit)
-
-# TODO: deal with objects with identical names (e.g. all ipv4 & all ipv6)
-def resolve_objects (obj_name_string_list, lookup_dict={}, delimiter=list_delimiter, jwt=None, import_id=None, mgm_id=None):
- logger = getFwoLogger()
- fwo_config = readConfig(fwo_config_filename)
-
- ref_list = []
- objects_not_found = []
- for el in obj_name_string_list.split(delimiter):
- found = False
- if el in lookup_dict:
- ref_list.append(lookup_dict[el])
- else:
- objects_not_found.append(el)
-
- for obj in objects_not_found:
- if obj != 'all' and obj != 'Original':
- if not create_data_issue(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, obj_name=obj, severity=1, mgm_id=mgm_id):
- logger.warning("resolve_raw_objects: encountered error while trying to log an import data issue using create_data_issue")
-
- desc = "found a broken object reference '" + obj + "' "
- setAlert(fwo_config['fwo_api_base_url'], jwt, import_id=import_id, title="object reference error", mgm_id=mgm_id, severity=1, role='importer', \
- description=desc, source='import', alertCode=16)
-
- return delimiter.join(ref_list)
diff --git a/roles/importer/files/importer/fwo_api.py b/roles/importer/files/importer/fwo_api.py
index 1477b550c..6d24dba86 100644
--- a/roles/importer/files/importer/fwo_api.py
+++ b/roles/importer/files/importer/fwo_api.py
@@ -41,58 +41,57 @@ def call(url, jwt, query, query_variables="", role="reporter", show_progress=Fal
full_query = {"query": query, "variables": query_variables}
logger = getFwoLogger()
- session = requests.Session()
- if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification)
- session.verify = False
- else:
- session.verify = fwo_globals.verify_certs
- session.headers = request_headers
+ with requests.Session() as session:
+ if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification)
+ session.verify = False
+ else:
+ session.verify = fwo_globals.verify_certs
+ session.headers = request_headers
- try:
- r = session.post(url, data=json.dumps(full_query), timeout=int(fwo_api_http_import_timeout))
- r.raise_for_status()
- except requests.exceptions.RequestException:
- logger.error(showApiCallInfo(url, full_query, request_headers, type='error') + ":\n" + str(traceback.format_exc()))
+ try:
+ r = session.post(url, data=json.dumps(full_query), timeout=int(fwo_api_http_import_timeout))
+ r.raise_for_status()
+ except requests.exceptions.RequestException:
+ logger.error(showApiCallInfo(url, full_query, request_headers, type='error') + ":\n" + str(traceback.format_exc()))
+ if r != None:
+ if r.status_code == 503:
+ raise FwoApiTServiceUnavailable("FWO API HTTP error 503 (FWO API died?)" )
+ if r.status_code == 502:
+ raise FwoApiTimeout("FWO API HTTP error 502 (might have reached timeout of " + str(int(fwo_api_http_import_timeout)/60) + " minutes)" )
+ else:
+ raise
+ if int(fwo_globals.debug_level) > 4:
+ logger.debug (showApiCallInfo(url, full_query, request_headers, type='debug'))
+ if show_progress:
+ print('.', end='', flush=True)
if r != None:
- if r.status_code == 503:
- raise FwoApiTServiceUnavailable("FWO API HTTP error 503 (FWO API died?)" )
- if r.status_code == 502:
- raise FwoApiTimeout("FWO API HTTP error 502 (might have reached timeout of " + str(int(fwo_api_http_import_timeout)/60) + " minutes)" )
+ return r.json()
else:
- raise
- if int(fwo_globals.debug_level) > 4:
- logger.debug (showApiCallInfo(url, full_query, request_headers, type='debug'))
- if show_progress:
- print('.', end='', flush=True)
-
- if r != None:
- return r.json()
- else:
- return None
+ return None
def login(user, password, user_management_api_base_url, method='api/AuthenticationToken/Get'):
payload = {"Username": user, "Password": password}
- session = requests.Session()
- if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification)
- session.verify = False
- else:
- session.verify = fwo_globals.verify_certs
- session.headers = {'Content-Type': 'application/json'}
+ with requests.Session() as session:
+ if fwo_globals.verify_certs is None: # only for first FWO API call (getting info on cert verification)
+ session.verify = False
+ else:
+ session.verify = fwo_globals.verify_certs
+ session.headers = {'Content-Type': 'application/json'}
- try:
- response = session.post(user_management_api_base_url + method, data=json.dumps(payload))
- except requests.exceptions.RequestException:
- raise FwoApiLoginFailed ("fwo_api: error during login to url: " + str(user_management_api_base_url) + " with user " + user) from None
+ try:
+ response = session.post(user_management_api_base_url + method, data=json.dumps(payload))
+ except requests.exceptions.RequestException:
+ raise FwoApiLoginFailed ("fwo_api: error during login to url: " + str(user_management_api_base_url) + " with user " + user) from None
- if response.text is not None and response.status_code==200:
- return response.text
- else:
- error_txt = "fwo_api: ERROR: did not receive a JWT during login" + \
- ", api_url: " + str(user_management_api_base_url) + \
- ", ssl_verification: " + str(fwo_globals.verify_certs)
- raise FwoApiLoginFailed(error_txt)
+ if response.text is not None and response.status_code==200:
+ return response.text
+ else:
+ error_txt = "fwo_api: ERROR: did not receive a JWT during login" + \
+ ", api_url: " + str(user_management_api_base_url) + \
+ ", ssl_verification: " + str(fwo_globals.verify_certs)
+ raise FwoApiLoginFailed(error_txt)
def set_api_url(base_url, testmode, api_supported, hostname):
@@ -135,6 +134,18 @@ def get_config_value(fwo_api_base_url, jwt, key='limit'):
return None
+def get_config_values(fwo_api_base_url, jwt, keyFilter='limit'):
+ query_variables = {'keyFilter': keyFilter+"%"}
+ config_query = "query getConf($keyFilter: String) { config(where: {config_key: {_ilike: $keyFilter}}) { config_key config_value } }"
+ result = call(fwo_api_base_url, jwt, config_query, query_variables=query_variables, role='importer')
+ if 'data' in result and 'config' in result['data']:
+ resultArray = result['data']['config']
+ dict1 = {v['config_key']: v['config_value'] for k,v in enumerate(resultArray)}
+ return dict1
+ else:
+ return None
+
+
def get_mgm_details(fwo_api_base_url, jwt, query_variables, debug_level=0):
mgm_query = """
query getManagementDetails($mgmId: Int!) {
diff --git a/roles/importer/files/importer/fwo_const.py b/roles/importer/files/importer/fwo_const.py
index 6794a81ca..1a3fd5686 100644
--- a/roles/importer/files/importer/fwo_const.py
+++ b/roles/importer/files/importer/fwo_const.py
@@ -22,6 +22,7 @@
import_tmp_path = base_dir + '/tmp/import'
fwo_config_filename = base_dir + '/etc/fworch.json'
max_recursion_level = 25 # do not call a function recursively more than this
+default_section_header_text = 'section without name'
# how many objects (network, services, rules, ...) should be sent to the FWO API in one go?
# should be between 500 and 2.000 in production (results in a max obj number of max. 5 x this value - nwobj/svc/rules/...)
diff --git a/roles/importer/files/importer/fwo_exception.py b/roles/importer/files/importer/fwo_exception.py
index 6906525f3..c2c1e69da 100644
--- a/roles/importer/files/importer/fwo_exception.py
+++ b/roles/importer/files/importer/fwo_exception.py
@@ -6,6 +6,13 @@ def __init__(self, message="Login to FW management failed"):
self.message = message
super().__init__(self.message)
+class FwLogoutFailed(Exception):
+ """Raised when logout from FW management failed"""
+
+ def __init__(self, message="Logout from FW management failed"):
+ self.message = message
+ super().__init__(self.message)
+
class FwoApiLoginFailed(Exception):
"""Raised when login to FWO API failed"""
diff --git a/roles/importer/files/importer/fwo_mail.py b/roles/importer/files/importer/fwo_mail.py
new file mode 100644
index 000000000..de8a60ae7
--- /dev/null
+++ b/roles/importer/files/importer/fwo_mail.py
@@ -0,0 +1,82 @@
+import json
+import jsonpickle
+from fwo_data_networking import InterfaceSerializable, RouteSerializable
+import fwo_globals
+from fwo_const import max_objs_per_chunk, csv_delimiter, apostrophe, line_delimiter
+from fwo_log import getFwoLogger, getFwoAlertLogger
+from copy import deepcopy
+import smtplib, ssl
+from email.message import EmailMessage
+
+
+def send_mail(recipient_list, subject, body, fwo_config):
+ logger = getFwoLogger()
+ # Create a text/plain message
+ msg = EmailMessage()
+ senderAddress = ""
+ msg.set_content(body)
+ msg['Subject'] = subject
+ if 'emailSenderAddress' in fwo_config:
+ senderAddress = fwo_config['emailSenderAddress']
+ msg['From'] = senderAddress
+ msg['To'] = recipient_list
+ tlsSetting = ""
+
+ try:
+ if 'emailTls' not in fwo_config or fwo_config['emailTls']=='StartTls':
+ smtp_server = smtplib.SMTP(fwo_config['emailServerAddress'], int(fwo_config['emailPort']))
+ if 'emailTls' in fwo_config and fwo_config['emailTls']=='StartTls':
+ tlsSetting = fwo_config['emailTls']
+                smtp_server.starttls() #setting up the TLS connection
+                smtp_server.ehlo() #calling ehlo() again as encryption happens on calling starttls()
+ else:
+ smtp_server.ehlo() #setting the ESMTP protocol
+ elif fwo_config['emailTls']=='Tls':
+ context = ssl.create_default_context()
+ context.check_hostname = False
+ context.verify_mode = ssl.CERT_NONE
+ smtp_server = smtplib.SMTP(fwo_config['emailServerAddress'], int(fwo_config['emailPort']))
+ smtp_server.starttls(context=context)
+ smtp_server.ehlo()
+ if 'emailUser' in fwo_config and 'emailPassword' in fwo_config and fwo_config['emailUser']!="":
+            smtp_server.login(fwo_config['emailUser'], fwo_config['emailPassword']) #logging into our email account
+
+ #sending the mail by specifying the from and to address and the message
+ smtp_server.send_message(msg)
+ smtp_server.quit() #terminating the server
+ except Exception as e:
+ if 'emailPort' not in fwo_config:
+ logger.warning("Missing email server port config. Double-check your emailPort configuration")
+ elif int(fwo_config['emailPort'])<1 or int(fwo_config['emailPort'])>65535:
+ logger.warning("Email server port configuration out of bounds: " + str(fwo_config['emailPort']) + ". Double-check your emailPort configuration")
+ elif 'emailServer' not in fwo_config:
+ logger.warning("Missing email server address. Double-check your emailServer configuration")
+ elif len(fwo_config['emailServer'])==0:
+ logger.warning("Empty email server address. Double-check your emailServer configuration")
+ elif recipient_list is None:
+ logger.warning("Undefined email recipient list. Double-check your email recipient list")
+ elif len(recipient_list)==0:
+ logger.warning("Empty email recipient list. Double-check your email recipient list")
+ else:
+ logger.warning("error while sending import change notification email: " +
+ "emailServer: " + fwo_config['emailServerAddress'] + ", " +
+ "emailSenderAddress: " + senderAddress + ", " +
+ "emailPort: " + fwo_config['emailPort'] + ", " +
+ "emailTls: " + str(tlsSetting) + ", " +
+ "impChangeNotifyRecipients: " + str(recipient_list) + ", " +
+ "error: " + str(e)
+ )
+
+
+def send_change_notification_mail(fwo_config, number_of_changes, mgm_name, mgm_id):
+ if 'impChangeNotifyActive' in fwo_config and bool(fwo_config['impChangeNotifyActive']) and 'impChangeNotifyRecipients' in fwo_config:
+ body = ""
+ if 'impChangeNotifyBody' in fwo_config:
+ body += fwo_config['impChangeNotifyBody'] + ": "
+ body += str(number_of_changes) + ", Management: " + mgm_name + " (id=" + mgm_id + ")"
+ send_mail(
+ fwo_config['impChangeNotifyRecipients'].split(','),
+ fwo_config['impChangeNotifySubject'] if 'impChangeNotifySubject' in fwo_config else "firewall orchestrator change notification",
+ body,
+ fwo_config
+ )
diff --git a/roles/importer/files/importer/import-mgm.py b/roles/importer/files/importer/import-mgm.py
index ddbf1169d..b1897c2d8 100755
--- a/roles/importer/files/importer/import-mgm.py
+++ b/roles/importer/files/importer/import-mgm.py
@@ -18,8 +18,16 @@
parser.add_argument('-f', '--force', action='store_true', default=False,
help='If set the import will be attempted without checking for changes or if the importer module is the one defined')
parser.add_argument('-d', '--debug', metavar='debug_level', default='0',
- help='Debug Level: 0=off, 1=send debug to console, 2=send debug to file, 3=save noramlized config file; 4=additionally save native config file; default=0. \n' +\
- 'config files are saved to $FWORCH/tmp/import dir')
+ help='Debug Level: \
+ 0=off, \
+ 1=send debug to console, \
+ 2=send debug to file, \
+        3=save normalized config file, \
+ 4=additionally save native config file, \
+ 8=send native config (as read from firewall) to standard out, \
+ 9=send normalized config to standard out, \
+ (default=0), \
+ config files are saved to $FWORCH/tmp/import dir')
parser.add_argument('-v', "--verify_certificates", action='store_true', default = None,
help = "verify certificates")
parser.add_argument('-s', "--suppress_certificate_warnings", action='store_true', default = None,
diff --git a/roles/importer/tasks/main.yml b/roles/importer/tasks/main.yml
index 7a8c16123..ac439a1c6 100644
--- a/roles/importer/tasks/main.yml
+++ b/roles/importer/tasks/main.yml
@@ -50,6 +50,7 @@
owner: "{{ fworch_user }}"
group: "{{ fworch_group }}"
mode: "0755"
+ tags: [ 'test' ]
- name: set x-flag for importer executables (top level only)
file:
diff --git a/roles/importer/templates/fworch-importer-api.service.j2 b/roles/importer/templates/fworch-importer-api.service.j2
index ac5971a16..1287133fe 100644
--- a/roles/importer/templates/fworch-importer-api.service.j2
+++ b/roles/importer/templates/fworch-importer-api.service.j2
@@ -17,8 +17,8 @@ ExecStartPre=/bin/sleep 10
ExecStart={{ importer_home }}/import-main-loop.py
# ExecStop={{ importer_home }}/import-api-stop-helper
TimeoutStopSec=300min
-StandardOutput=syslog
-StandardError=syslog
+StandardOutput=journal
+StandardError=journal
SyslogIdentifier={{ product_name }}-importer-api
User={{ fworch_user }}
KillSignal=SIGINT
diff --git a/roles/importer/templates/fworch-importer-legacy.service.j2 b/roles/importer/templates/fworch-importer-legacy.service.j2
index 4ea747068..ba32e021e 100644
--- a/roles/importer/templates/fworch-importer-legacy.service.j2
+++ b/roles/importer/templates/fworch-importer-legacy.service.j2
@@ -7,8 +7,8 @@ WorkingDirectory={{ importer_home }}
ExecStartPre=/bin/sleep 10
ExecStart={{ importer_home }}/fworch-importer-main.pl
ExecStop={{ importer_home }}/import-stop-helper
-StandardOutput=syslog
-StandardError=syslog
+StandardOutput=journal
+StandardError=journal
SyslogIdentifier={{ product_name }}-importer-legacy
User={{ fworch_user }}
Environment="PERL5LIB={{ importer_home }}"
diff --git a/roles/lib/files/FWO.Api.Client/APIConnection.cs b/roles/lib/files/FWO.Api.Client/APIConnection.cs
index c1b8798b7..76c2ae95c 100644
--- a/roles/lib/files/FWO.Api.Client/APIConnection.cs
+++ b/roles/lib/files/FWO.Api.Client/APIConnection.cs
@@ -6,10 +6,14 @@
namespace FWO.Api.Client
{
- public abstract class ApiConnection
+ public abstract class ApiConnection : IDisposable
{
+ private bool disposed = false;
+
public event EventHandler? OnAuthHeaderChanged;
+ protected List subscriptions = new List();
+
protected void InvokeOnAuthHeaderChanged(object? sender, string newAuthHeader)
{
OnAuthHeaderChanged?.Invoke(sender, newAuthHeader);
@@ -19,8 +23,33 @@ protected void InvokeOnAuthHeaderChanged(object? sender, string newAuthHeader)
public abstract void SetRole(string role);
+ public abstract void SetProperRole(System.Security.Claims.ClaimsPrincipal user, List targetRoleList);
+
+ public abstract void SwitchBack();
+
public abstract Task SendQueryAsync(string query, object? variables = null, string? operationName = null);
- public abstract ApiSubscription GetSubscription(Action exceptionHandler, ApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null);
+ public abstract GraphQlApiSubscription GetSubscription(Action exceptionHandler, GraphQlApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null);
+
+ protected virtual void AddSubscription(ApiSubscription subscription)
+ {
+ subscriptions.Add(subscription);
+ }
+
+ protected abstract void Dispose(bool disposing);
+
+ ~ ApiConnection()
+ {
+ if (disposed) return;
+ Dispose(false);
+ }
+
+ public void Dispose()
+ {
+ if (disposed) return;
+ Dispose(true);
+ disposed = true;
+ GC.SuppressFinalize(this);
+ }
}
}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/_repo.graphql
deleted file mode 100644
index 80cdfe785..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/_repo.graphql
+++ /dev/null
@@ -1,553 +0,0 @@
-################ basics
-
-query getImportId($management_id: Int!, $time: timestamp!) {
- import_control_aggregate(
- where: { mgm_id: { _eq: $management_id }, stop_time: { _lte: $time } }
- ) {
- aggregate {
- max {
- control_id
- }
- }
- }
-}
-
-################# dyn_filter
-
-query filter_dyn($manufacturer_id: [Int!]) {
- __typename
- stm_dev_typ(where: { dev_typ_id: { _in: $manufacturer_id } }) {
- dev_typ_name
- dev_typ_version
- dev_typ_id
- }
-}
-
-query filter_dyn($management_id: [Int!], $device_id: [Int!]) {
- __typename
- management(where: { mgm_id: { _in: $management_id } }) {
- mgm_id
- mgm_name
- devices(where: { dev_id: { _in: $device_id } }) {
- dev_id
- dev_name
- }
- }
-}
-
-query filter_dyn($manufacturer_id: [Int!]!, $management_id: [Int!]!) {
- __typename
- stm_dev_typ(where: { dev_typ_id: { _in: $manufacturer_id } }) {
- dev_typ_name
- dev_typ_version
- dev_typ_id
- management(where: { mgm_id: { _in: $management_id } }) {
- mgm_id
- mgm_name
- }
- }
-}
-
-# query returning a flat list of all device_types matching triple filter:
-query filter_dyn_device_type(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- stm_dev_typ(
- where: {
- _and: {
- dev_typ_id: { _in: $manufacturer_id }
- devices: { dev_id: { _in: $device_id } }
- management: { mgm_id: { _in: $management_id } }
- }
- }
- ) {
- dev_typ_id
- dev_typ_name
- }
-}
-
-# query returning a flat list of all managements matching triple filter:
-query filter_dyn_management(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- management(
- where: {
- _and: {
- mgm_id: { _in: $management_id }
- dev_typ_id: { _in: $manufacturer_id }
- devices: { dev_id: { _in: $device_id } }
- }
- }
- ) {
- mgm_id
- mgm_name
- }
-}
-
-# query returning a flat list of all devices matching triple filter:
-query filter_dyn_device(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- device(
- where: {
- _and: {
- mgm_id: { _in: $management_id }
- dev_typ_id: { _in: $manufacturer_id }
- dev_id: { _in: $device_id }
- }
- }
- ) {
- dev_id
- dev_name
- }
-}
-
-#######################
-
-query filter_dyn_device_type_count(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- stm_dev_typ_aggregate(
- where: {
- _and: {
- dev_typ_id: { _in: $manufacturer_id }
- devices: { dev_id: { _in: $device_id } }
- management: { mgm_id: { _in: $management_id } }
- }
- }
- ) {
- aggregate {
- count
- }
- }
-}
-
-query filter_dyn_management_count(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- management_aggregate(
- where: {
- _and: {
- mgm_id: { _in: $management_id }
- dev_typ_id: { _in: $manufacturer_id }
- devices: { dev_id: { _in: $device_id } }
- }
- }
- ) {
- aggregate {
- count
- }
- }
-}
-
-# query returning the aggregate number of all devices matching triple filter:
-query filter_dyn_device_count(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- device_aggregate(
- where: {
- _and: {
- mgm_id: { _in: $management_id }
- dev_typ_id: { _in: $manufacturer_id }
- dev_id: { _in: $device_id }
- }
- }
- ) {
- aggregate {
- count
- }
- }
-}
-
-#######################
-
-# query returning devices matching a query and total count:
-query filterDeviceByType(
- $manufacturer_id: [Int!]
- $management_id: [Int!]
- $device_id: [Int!]
-) {
- stm_dev_typ_aggregate(
- where: {
- _and: {
- dev_typ_id: { _in: $manufacturer_id }
- devices: { dev_id: { _in: $device_id } }
- management: { mgm_id: { _in: $management_id } }
- }
- }
- ) {
- aggregate {
- count
- }
- }
- device(
- where: {
- _and: {
- mgm_id: { _in: $management_id }
- dev_typ_id: { _in: $manufacturer_id }
- dev_id: { _in: $device_id }
- }
- }
- ) {
- dev_id
- dev_name
- }
-}
-
-#######################
-
-# query returning a multi-level structure with all data matching triple filter:
-query filterDevices(
- $manufacturerId: [Int!]
- $managementId: [Int!]
- $deviceId: [Int!]
-) {
- __typename
- stm_dev_typ(where: { dev_typ_id: { _in: $manufacturerId } }) {
- dev_typ_name
- dev_typ_version
- dev_typ_id
- management(where: { mgm_id: { _in: $managementId } }) {
- mgm_id
- mgm_name
- devices(where: { dev_id: { _in: $deviceId } }) {
- dev_id
- dev_name
- }
- }
- }
-}
-
-query ruleFilterFullTextCurrent(
- $managementId: [Int!]
- $deviceId: [Int!]
- $fullText: String!
- $limit: Int
- $offset: Int
-) {
- management(
- where: { mgm_id: { _in: $managementId } }
- order_by: { mgm_name: asc }
- ) {
- mgm_id
- mgm_name
- devices(
- where: { dev_id: { _in: $deviceId } }
- order_by: { dev_name: asc }
- ) {
- dev_id
- dev_name
- }
- rules(
- limit: $limit
- offset: $offset
- where: {
- _and: {
- active: { _eq: true }
- _or: [
- { rule_src: { _ilike: $fullText } }
- { rule_dst: { _ilike: $fullText } }
- { rule_svc: { _ilike: $fullText } }
- ]
- }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- rule_uid
- rule_src
- rule_dst
- rule_svc
- }
- }
-}
-
-query ruleFilterFullTextInTime (
- $managementId: [Int!]
- $deviceId: [Int!]
- $ruleSrcName: [String!]
- $ruleSrcIp: [cidr!]
- $limit: Int
- $offset: Int
- $current: Boolean
- $reportTime: timestamp
-) {
- management(
- where: { mgm_id: { _in: $managementId } }
- order_by: { mgm_name: asc }
- ) {
- mgm_id
- mgm_name
- devices(
- where: { dev_id: { _in: $deviceId } }
- order_by: { dev_name: asc }
- ) {
- dev_id
- dev_name
- rules_aggregate(
- limit: $limit
- offset: $offset
- where: {
- import_control: { stop_time: {_lte: $reportTime } }
- importControlByRuleLastSeen: { stop_time: {_gt: $reportTime } }
- active: { _eq: $current }
- rule_src: { _in: $ruleSrcName }
- rule_disabled: { _eq: false }
- rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- aggregate {
- count
- }
- }
- rules(
- limit: $limit
- offset: $offset
- where: {
- import_control: { stop_time: {_lte: $reportTime } }
- importControlByRuleLastSeen: { stop_time: {_gt: $reportTime } }
- active: { _eq: $current }
- rule_src: { _in: $ruleSrcName }
- rule_disabled: { _eq: false }
- rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- rule_uid
- rule_src
- lastSeenImport: importControlByRuleLastSeen {
- stop_time
- control_id
- }
- createImport: import_control {
- stop_time
- control_id
- }
- }
- }
- }
-}
-
-query ruleFilterKVCurrent(
- $managementId: [Int!]
- $deviceId: [Int!]
- $reportTime: timestamp
- $ruleSrcName: [String!]
- $ruleSrcIp: [cidr!]
- $ruleDstName: [String!]
- $ruleDstIp: [cidr!]
- $limit: Int
- $offset: Int
-) {
- management(
- where: { mgm_id: { _in: $managementId } }
- order_by: { mgm_name: asc }
- ) {
- mgm_id
- mgm_name
- devices(
- where: { dev_id: { _in: $deviceId } }
- order_by: { dev_name: asc }
- ) {
- dev_id
- dev_name
- rules_aggregate(
- limit: $limit
- offset: $offset
- where: {
- active: { _eq: true }
- rule_src: { _in: $ruleSrcName }
- rule_disabled: { _eq: false }
- rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- aggregate {
- count
- }
- }
- rules(
- limit: $limit
- offset: $offset
- where: {
- active: { _eq: true }
- rule_src: { _in: $ruleSrcName }
- rule_disabled: { _eq: false }
- rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- rule_uid
- rule_src
- lastSeenImport: importControlByRuleLastSeen {
- stop_time
- control_id
- }
- createImport: import_control {
- stop_time
- control_id
- }
- }
- }
- }
-}
-
-
-query ruleFilterKVInTime(
- $managementId: [Int!]
- $deviceId: [Int!]
- $reportTime: timestamp
- $ruleSrcName: [String!]
- $ruleSrcIp: [cidr!]
- $ruleDstName: [String!]
- $ruleDstIp: [cidr!]
- $limit: Int
- $offset: Int
-) {
- management(
- where: { mgm_id: { _in: $managementId } }
- order_by: { mgm_name: asc }
- ) {
- mgm_id
- mgm_name
- devices(
- where: { dev_id: { _in: $deviceId } }
- order_by: { dev_name: asc }
- ) {
- dev_id
- dev_name
- }
- rules(
- limit: $limit
- offset: $offset
- where: {
- import_control: { stop_time: { _lte: $reportTime } }
- importControlByRuleLastSeen: { stop_time: { _gt: $reportTime } }
- rule_disabled: { _eq: false }
- rule_src: { _in: $ruleSrcName }
- rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } }
- rule_dst: { _in: $ruleDstName }
- rule_tos: { object: { obj_ip: { _in: $ruleDstIp } } }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- rule_uid
- rule_src
- lastSeenImport: importControlByRuleLastSeen {
- stop_time
- control_id
- }
- createImport: import_control {
- stop_time
- control_id
- }
- }
- }
-}
-
-
-query ruleFilterKVInTimeCount(
- $managementId: [Int!]
- $deviceId: [Int!]
- $reportTime: timestamp
- $ruleSrcName: [String!]
- $ruleSrcIp: [cidr!]
- $ruleDstName: [String!]
- $ruleDstIp: [cidr!]
-) {
- management(
- where: { mgm_id: { _in: $managementId } }
- order_by: { mgm_name: asc }
- ) {
- mgm_id
- mgm_name
- devices(
- where: { dev_id: { _in: $deviceId } }
- order_by: { dev_name: asc }
- ) {
- dev_id
- dev_name
- rules_aggregate(
- where: {
- import_control: { stop_time: { _lte: $reportTime } }
- importControlByRuleLastSeen: { stop_time: { _gt: $reportTime } }
- rule_disabled: { _eq: false }
- rule_src: { _in: $ruleSrcName }
- rule_froms: { object: { obj_ip: { _in: $ruleSrcIp } } }
- rule_dst: { _in: $ruleDstName }
- rule_tos: { object: { obj_ip: { _in: $ruleDstIp } } }
- }
- ) {
- aggregate {
- count
- }
- }
- }
- }
-}
-
-query ruleFilterKVInTimeSingleValues(
- $managementId: [Int!]
- $deviceId: [Int!]
- $reportTime: timestamp
- $ruleSrcName1: String
- $ruleSrcName2: String
- $limit: Int
- $offset: Int
-) {
- management(
- where: { mgm_id: { _in: $managementId } }
- order_by: { mgm_name: asc }
- ) {
- mgm_id
- mgm_name
- devices(
- where: { dev_id: { _in: $deviceId } }
- order_by: { dev_name: asc }
- ) {
- dev_id
- dev_name
- }
- rules(
- limit: $limit
- offset: $offset
- where: {
- _and: {
- import_control: { stop_time: { _lte: $reportTime } }
- importControlByRuleLastSeen: { stop_time: { _gt: $reportTime } }
- rule_disabled: { _eq: false }
- _or: [
- { rule_src: { _ilike: $ruleSrcName1 } }
- { rule_src: { _ilike: $ruleSrcName2 } }
- ]
- }
- }
- order_by: { rule_num_numeric: asc }
- ) {
- rule_uid
- rule_src
- lastSeenImport: importControlByRuleLastSeen {
- stop_time
- control_id
- }
- createImport: import_control {
- stop_time
- control_id
- }
- }
- }
-}
-
-# replace rule values with ...ruleOverview
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/auth/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/auth/_repo.graphql
deleted file mode 100644
index b514c7ea7..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/auth/_repo.graphql
+++ /dev/null
@@ -1,23 +0,0 @@
-
-query getVisibleDevIdsPerTenant($tenant_id: Int!) {
- device(where: { tenant_to_devices: { tenant_id: { _eq: $tenant_id } } }) {
- dev_id
- }
-}
-
-# this does not work:
-# query getVisibleDevIdsFromTenantName($tenant_name: String!) {
-# device(
-# where: {client_to_devices:
-# {
-# tenant_id: {_eq: getTenantId($tenant_name)}}
-# }
-# )
-# { dev_id }
-# }
-
-query tenantCanViewAllDevices($tenant_id: Int!) {
- tenant(where: { tenant_id: { _eq: $tenant_id } }) {
- tenant_can_view_all_devices
- }
-}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/addNetworkZone.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/addNetworkZone.graphql
new file mode 100644
index 000000000..8b8193548
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/addNetworkZone.graphql
@@ -0,0 +1,25 @@
+mutation insert_compliance_network_zone ($name: String!, $description: String!, $ip_ranges: [compliance_ip_range_insert_input!]!, $super_network_zone_id: bigint,
+$communication_sources: [compliance_network_zone_communication_insert_input!]!, $communication_destinations: [compliance_network_zone_communication_insert_input!]!,
+$sub_network_zones: [compliance_network_zone_insert_input!]!) {
+ insert_compliance_network_zone_one (
+ object: {
+ super_network_zone_id: $super_network_zone_id,
+ name: $name,
+ description: $description,
+ ip_ranges: {
+ data: $ip_ranges
+ },
+ network_zone_communication_destinations: {
+ data: $communication_destinations
+ },
+ network_zone_communication_sources: {
+ data: $communication_sources
+ },
+ sub_network_zones: {
+ data: $sub_network_zones
+ }
+ }
+ ) {
+ id
+ }
+}
\ No newline at end of file
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/deleteNetworkZone.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/deleteNetworkZone.graphql
new file mode 100644
index 000000000..7800da5be
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/deleteNetworkZone.graphql
@@ -0,0 +1,7 @@
+mutation delete_compliance_network_zone ($id: bigint!) {
+ delete_compliance_network_zone_by_pk (
+ id: $id
+ ) {
+ id
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/getNetworkZones.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/getNetworkZones.graphql
new file mode 100644
index 000000000..cca37df14
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/getNetworkZones.graphql
@@ -0,0 +1,31 @@
+query get_compliance_network_zones {
+ compliance_network_zone (order_by: {name: asc}) {
+ id
+ name
+ description
+ ip_ranges {
+ ip_range_start
+ ip_range_end
+ }
+ super_network_zone {
+ id
+ name
+ }
+ sub_network_zones {
+ id
+ name
+ }
+ network_zone_communication_destinations {
+ to_network_zone {
+ id
+ name
+ }
+ }
+ network_zone_communication_sources {
+ from_network_zone {
+ id
+ name
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZone.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZone.graphql
new file mode 100644
index 000000000..3b25ce7fb
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZone.graphql
@@ -0,0 +1,68 @@
+mutation update_compliance_network_zone ($network_zone_id: bigint!, $name: String!, $description: String!, $super_network_zone_id: bigint,
+ $add_ip_ranges: [compliance_ip_range_insert_input!]!, $delete_ip_ranges_exp: [compliance_ip_range_bool_exp!]!,
+ $add_zone_communication: [compliance_network_zone_communication_insert_input!]!, $delete_zone_communication_exp: [compliance_network_zone_communication_bool_exp!]!,
+ $add_sub_zones_exp: [compliance_network_zone_bool_exp!]!, $delete_sub_zones_exp: [compliance_network_zone_bool_exp!]!)
+{
+ update_compliance_network_zone (
+ where: {id: {_eq: $network_zone_id}}
+ _set: {
+ name: $name,
+ description: $description,
+ super_network_zone_id: $super_network_zone_id
+ }
+ ) {
+ affected_rows
+ }
+
+ delete_compliance_ip_range (
+ where: {
+ network_zone_id: {_eq: $network_zone_id},
+ _or: $delete_ip_ranges_exp
+ }
+ ) {
+ affected_rows
+ }
+
+ insert_compliance_ip_range (
+ objects: $add_ip_ranges
+ ) {
+ affected_rows
+ }
+
+ delete_compliance_network_zone_communication (
+ where: {
+ _or: $delete_zone_communication_exp
+ }
+ ) {
+ affected_rows
+ }
+
+ insert_compliance_network_zone_communication (
+ objects: $add_zone_communication
+ ) {
+ affected_rows
+ }
+
+ update_compliance_network_zone_many (
+ updates: [
+ {
+ where: {
+ _or: $delete_sub_zones_exp
+ }
+ _set: {
+ super_network_zone_id: null
+ }
+ },
+ {
+ where: {
+ _or: $add_sub_zones_exp
+ }
+ _set: {
+ super_network_zone_id: $network_zone_id
+ }
+ }
+ ]
+ ) {
+ affected_rows
+ }
+}
\ No newline at end of file
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZoneCommunication.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZoneCommunication.graphql
new file mode 100644
index 000000000..54aed3e5f
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/compliance/updateNetworkZoneCommunication.graphql
@@ -0,0 +1,18 @@
+mutation update_compliance_network_zone_communication(
+ $delete_zone_communication_exp: [compliance_network_zone_communication_bool_exp!]!,
+ $add_zone_communication: [compliance_network_zone_communication_insert_input!]!,)
+{
+ delete_compliance_network_zone_communication (
+ where: {
+ _or: $delete_zone_communication_exp
+ }
+ ) {
+ affected_rows
+ }
+
+ insert_compliance_network_zone_communication (
+ objects: $add_zone_communication
+ ) {
+ affected_rows
+ }
+}
\ No newline at end of file
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/config/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/config/_repo.graphql
deleted file mode 100644
index 72f78f671..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/config/_repo.graphql
+++ /dev/null
@@ -1,24 +0,0 @@
-
-# JWT Hash algorithm (needed by API, Middleware, UI)
-
-# default language per user (UI)
-# current strategy: all user specific information is stored in ldap
-# --> should be retrieved via middleware server?
-
-###############################################
-# basic config data related to device import/report
-# the following could be exposed for offering a UI menu for adding new basic config data:
-# currently only read by (UI, Importer)
-
-# stm_
-# action
-# change_type
-# color
-# dev_typ
-# ip_proto
-# nattyp (needed?)
-# obj_typ
-# report_typ
-# svc_typ
-# track
-# usr_typ
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/device/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/device/_repo.graphql
deleted file mode 100644
index 1ea0bddc0..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/device/_repo.graphql
+++ /dev/null
@@ -1,51 +0,0 @@
-query showManufacturers {
- stm_dev_typ {
- dev_typ_id
- dev_typ_manufacturer
- dev_typ_version
- }
-}
-
-#####################################
-
-query showManagements {
- management {
- mgm_id
- mgm_name
- }
-}
-
-#####################################
-
-query showDevices {
- device {
- dev_id
- dev_name
- local_rulebase_name
- management {
- mgm_id
- mgm_name
- }
- }
-}
-
-query showDevicesWithType {
- device {
- dev_id
- dev_name
- stm_dev_typ {
- dev_typ_name
- dev_typ_version
- }
- }
-}
-
-###################################
-
-query showManufacturers {
- stm_dev_typ(order_by: { dev_typ_manufacturer: asc, dev_typ_version: asc }) {
- dev_typ_id
- dev_typ_manufacturer
- dev_typ_version
- }
-}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getAllUiLogEntrys.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getAllUiLogEntrys.graphql
new file mode 100644
index 000000000..cb60dbc27
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getAllUiLogEntrys.graphql
@@ -0,0 +1,10 @@
+query getAllUiLogEntrys{
+ log_data_issue (where: {source: {_eq: "ui"}} order_by: { data_issue_id: desc }){
+ data_issue_id
+ severity
+ issue_timestamp
+ suspected_cause
+ description
+ user_id
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/device/getImportStatus.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getImportStatus.graphql
similarity index 100%
rename from roles/lib/files/FWO.Api.Client/APIcalls/device/getImportStatus.graphql
rename to roles/lib/files/FWO.Api.Client/APIcalls/monitor/getImportStatus.graphql
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql
index bd17688da..b18a016c6 100644
--- a/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/monitor/getUiLogEntrys.graphql
@@ -5,5 +5,6 @@ query getUiLogEntrys ($user: Int!){
issue_timestamp
suspected_cause
description
+ user_id
}
}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/networkObject/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/networkObject/_repo.graphql
deleted file mode 100644
index a8ae2d117..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/networkObject/_repo.graphql
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-# needs to be exact import id for the specific device, otherwise it might not return desired results
-query listHistoricalObjects($import_id: Int!, $management_id: Int) {
- object_aggregate(
- where: {
- mgm_id: { _eq: $mgmt }
- obj_create: { _lte: $import_id }
- obj_last_seen: { _gte: $import_id }
- }
- ) {
- aggregate {
- count
- }
- }
- object(
- where: {
- mgm_id: { _eq: $mgmt }
- obj_create: { _lte: $import_id }
- obj_last_seen: { _gte: $import_id }
- }
- ) {
- ...networkObjectDetails
- }
-}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql
index cdd8292e5..e8f07924b 100644
--- a/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/recertification/fragments/ruleOpenCertOverview.graphql
@@ -35,6 +35,15 @@ fragment ruleOpenCertOverview on rule {
name
}
}
+ recert_history: recertifications (where: { owner: $ownerWhere, recert_date: {_is_null: false}}, order_by: { recert_date: desc }) {
+ recert_date
+ recertified
+ user_dn
+ comment
+ owner {
+ name
+ }
+ }
}
rule_src_neg
rule_dst_neg
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/report/getUsageDataCount.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/report/getUsageDataCount.graphql
new file mode 100644
index 000000000..a76006fea
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/report/getUsageDataCount.graphql
@@ -0,0 +1,8 @@
+
+query getUsageDataCount($devId: Int) {
+ rule_aggregate(where: {_and: [ {dev_id: {_eq: $devId } }, { rule_metadatum: {rule_last_hit: { _is_null: false } } } ] }) {
+ aggregate {
+ count
+ }
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeGeneratedReportsChanges.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeGeneratedReportsChanges.graphql
new file mode 100644
index 000000000..14057b001
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeGeneratedReportsChanges.graphql
@@ -0,0 +1,16 @@
+subscription subscribeGeneratedReportsChanges {
+ report(order_by:{report_id:desc}) {
+ report_id
+ report_name
+ report_start_time
+ report_end_time
+ report_type
+ description
+ uiuser {
+ uiuser_username
+ }
+ report_template {
+ report_template_name
+ }
+ }
+}
\ No newline at end of file
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql
index 71931da7a..d16fdd659 100644
--- a/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/report/subscribeReportScheduleChanges.graphql
@@ -1,5 +1,5 @@
subscription subscribeReportScheduleChanges {
- report_schedule {
+ report_schedule(order_by: {report_schedule_id: desc}) {
report_schedule_id
report_schedule_name
report_schedule_every
@@ -8,7 +8,6 @@
report_schedule_owner_user: uiuser {
uiuser_id
uiuser_username
- uuid
ldap_connection: ldap_connection {
ldap_connection_id
}
@@ -20,8 +19,9 @@
report_filter
report_parameters
}
- report_schedule_formats{
+ report_schedule_formats {
report_schedule_format_name
}
+ report_schedule_counter
}
-}
\ No newline at end of file
+}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/_repo.graphql
deleted file mode 100644
index 0990e97df..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/_repo.graphql
+++ /dev/null
@@ -1,120 +0,0 @@
-
-query getSpecificRuleById($ruleId: Int!) {
- rule(where: { rule_id: { _eq: $ruleId } }) {
- ...ruleDetailsForReport
- }
-}
-
-query listRuleChangesOverview(
- $startId: Int
- $stopId: Int
- $devId: Int
- $changeType: bpchar
-) {
- changelog_rule(
- where: {
- _and: [
- { control_id: { _lt: $stopId } }
- { control_id: { _gt: $startId } }
- { security_relevant: { _eq: true } }
- ]
- dev_id: { _eq: $devId }
- change_action: { _eq: $changeType }
- }
- ) {
- change_request_info
- change_time
- changelog_rule_comment
- new_rule_id
- old_rule_id
- unique_name
- dev_id
- change_action
- new_rule: rule {
- ...ruleOverview
- }
- old_rule: ruleByOldRuleId {
- ...ruleOverview
- }
- }
-}
-
-query listRuleChangesDetails(
- $startId: Int
- $stopId: Int
- $devId: Int
- $changeType: bpchar
-) {
- changelog_rule(
- where: {
- _and: [
- { control_id: { _lt: $stopId } }
- { control_id: { _gt: $startId } }
- { security_relevant: { _eq: true } }
- ]
- dev_id: { _eq: $devId }
- change_action: { _eq: $changeType }
- }
- ) {
- dev_id
- change_action
- import_run_details: import_control {
- import_id: control_id
- mgm_id
- is_initial_import
- import_time: stop_time
- }
- rule {
- ...ruleDetailsForReport
- }
- ruleByOldRuleId {
- ...ruleDetailsForReport
- }
- }
-}
-
-
-
-##############################
-## mutations
-##############################
-
-
-mutation updateRuleRuleComment($rule_id: Int!, $new_comment: String!) {
- update_rule(where: {rule_id: {_eq: $rule_id}}, _set: {rule_comment: $new_comment}) {
- affected_rows
- returning {
- rule_id
- rule_comment_post: rule_comment
- }
- }
-}
-
-query filterRulesByTenant($importId: bigint) {
- view_tenant_rules(where: {access_rule: {_eq: true}, rule_last_seen: {_gte: $importId}, rule_create: {_lte: $importId}}) {
- rule_id
- rule_src
- rule_dst
- rule_create
- rule_last_seen
- tenant_id
- }
-}
-
-query filterRulesByTenantWithoutAnyRulesWithCount($importId: bigint) {
- view_tenant_rules_aggregate
- (where: {access_rule: {_eq: true}, rule_last_seen: {_gte: $importId}, rule_create: {_lte: $importId}, _and: [{rule_src: {_neq: "all"}}, {rule_dst: {_neq: "all"}}, {rule_src: {_neq: "Any"}}, {rule_dst: {_neq: "Any"}}]})
- {
- aggregate {
- count
- }
- }
- view_tenant_rules(where: {access_rule: {_eq: true}, rule_last_seen: {_gte: $importId}, rule_create: {_lte: $importId}, _and: [{rule_src: {_neq: "all"}}, {rule_dst: {_neq: "all"}}, {rule_src: {_neq: "Any"}}, {rule_dst: {_neq: "Any"}}]}) {
- rule_id
- rule_src
- rule_dst
- rule_create
- rule_last_seen
- tenant_id
- }
-}
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql
index d090fe986..33b764bf5 100644
--- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetails.graphql
@@ -1,6 +1,7 @@
fragment ruleDetails on rule {
rule_id
rule_uid
+ dev_id
rule_action
section_header: rule_head_text
rule_comment
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql
index c4116d8a7..8ffa21369 100644
--- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleDetailsForReport.graphql
@@ -1,6 +1,7 @@
fragment ruleDetails on rule {
rule_id
rule_uid
+ dev_id
rule_action
section_header: rule_head_text
rule_comment
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql
index 89f810f42..5042df8cf 100644
--- a/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql
+++ b/roles/lib/files/FWO.Api.Client/APIcalls/rule/fragments/ruleOverview.graphql
@@ -1,6 +1,7 @@
fragment ruleOverview on rule {
rule_id
rule_uid
+ dev_id
rule_action
section_header: rule_head_text
rule_comment
diff --git a/roles/lib/files/FWO.Api.Client/APIcalls/user/_repo.graphql b/roles/lib/files/FWO.Api.Client/APIcalls/user/_repo.graphql
deleted file mode 100644
index 0cebcc73d..000000000
--- a/roles/lib/files/FWO.Api.Client/APIcalls/user/_repo.graphql
+++ /dev/null
@@ -1,35 +0,0 @@
-fragment userDetails on usr {
- user_id
- user_uid
- user_name
- user_comment
- user_lastname
- user_firstname
- usr_typ_id
- stm_usr_typ {
- usr_typ_name
- }
- user_member_names
- user_member_refs
-}
-
-query listUsers(
- $management_id: [Int!]
- $time: String
- $user_name: [String!]
- $limit: Int
- $offset: Int
-) {
- management(where: { mgm_id: { _in: $management_id } }) {
- mgm_id
- mgm_name
- usrs(
- limit: $limit
- offset: $offset
- where: { active: { _eq: true }, user_name: { _in: $user_name } }
- order_by: { user_name: asc }
- ) {
- ...userDetails
- }
- }
-}
diff --git a/roles/lib/files/FWO.Api.Client/ApiSubscription.cs b/roles/lib/files/FWO.Api.Client/ApiSubscription.cs
index d3b6eda6f..b8f964771 100644
--- a/roles/lib/files/FWO.Api.Client/ApiSubscription.cs
+++ b/roles/lib/files/FWO.Api.Client/ApiSubscription.cs
@@ -1,108 +1,29 @@
-using GraphQL;
-using System;
+using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
-using System.Text.Json;
using System.Threading.Tasks;
-using FWO.Api.Client;
-using Newtonsoft.Json.Linq;
-using FWO.Logging;
-using GraphQL.Client.Abstractions;
-using GraphQL.Client.Http;
namespace FWO.Api.Client
{
- public class ApiSubscription : IDisposable
+ public abstract class ApiSubscription : IDisposable
{
- public delegate void SubscriptionUpdate(SubscriptionResponseType reponse);
- public event SubscriptionUpdate OnUpdate;
+ private bool disposed = false;
- private IObservable> subscriptionStream;
- private IDisposable subscription;
- private readonly GraphQLHttpClient graphQlClient;
- private readonly GraphQLRequest request;
- private readonly Action internalExceptionHandler;
+ protected abstract void Dispose(bool disposing);
- public ApiSubscription(ApiConnection apiConnection, GraphQLHttpClient graphQlClient, GraphQLRequest request, Action exceptionHandler, SubscriptionUpdate OnUpdate)
- {
- this.OnUpdate = OnUpdate;
- this.graphQlClient = graphQlClient;
- this.request = request;
-
- // handle subscription terminating exceptions
- internalExceptionHandler = (Exception exception) =>
- {
- // Case: Jwt expired
- if (exception.Message.Contains("JWTExpired"))
- {
- // Quit subscription by throwing exception.
- // This does NOT lead to a real thrown exception within the application but is instead handled by the graphql library
- throw exception;
- }
- exceptionHandler(exception);
- };
-
- CreateSubscription();
-
- apiConnection.OnAuthHeaderChanged += ApiConnectionOnAuthHeaderChanged;
- }
-
- private void CreateSubscription()
- {
- Log.WriteDebug("API", $"Creating API subscription {request.OperationName}.");
- subscriptionStream = graphQlClient.CreateSubscriptionStream(request, internalExceptionHandler);
- Log.WriteDebug("API", "API subscription created.");
-
- subscription = subscriptionStream.Subscribe(response =>
- {
- if (ApiConstants.UseSystemTextJsonSerializer)
- {
- JsonElement.ObjectEnumerator responseObjectEnumerator = response.Data.EnumerateObject();
- responseObjectEnumerator.MoveNext();
- SubscriptionResponseType returnValue = JsonSerializer.Deserialize(responseObjectEnumerator.Current.Value.GetRawText()) ??
- throw new Exception($"Could not convert result from Json to {nameof(SubscriptionResponseType)}.\nJson: {responseObjectEnumerator.Current.Value.GetRawText()}"); ;
- OnUpdate(returnValue);
- }
- else
- {
- try
- {
- // If repsonse.Data == null -> Jwt expired - connection was closed
- // Leads to this method getting called again
- if (response.Data == null)
- {
- // Terminate subscription
- subscription.Dispose();
- }
- else
- {
- JObject data = (JObject)response.Data;
- JProperty prop = (JProperty)(data.First ?? throw new Exception($"Could not retrieve unique result attribute from Json.\nJson: {response.Data}"));
- JToken result = prop.Value;
- SubscriptionResponseType returnValue = result.ToObject() ?? throw new Exception($"Could not convert result from Json to {typeof(SubscriptionResponseType)}.\nJson: {response.Data}");
- OnUpdate(returnValue);
- }
- }
- catch (Exception ex)
- {
- Log.WriteError("GraphQL Subscription", "Subscription lead to exception", ex);
- throw;
- }
- }
- });
- }
-
- private void ApiConnectionOnAuthHeaderChanged(object? sender, string jwt)
+ public void Dispose()
{
- subscription.Dispose();
- CreateSubscription();
+ if (disposed) return;
+ Dispose(true);
+ disposed = true;
+ GC.SuppressFinalize(this);
}
- public void Dispose()
+ ~ ApiSubscription()
{
- subscription.Dispose();
- GC.SuppressFinalize(this);
+ if (disposed) return;
+ Dispose(false);
}
}
}
diff --git a/roles/lib/files/FWO.Api.Client/Data/ComplianceNetworkZone.cs b/roles/lib/files/FWO.Api.Client/Data/ComplianceNetworkZone.cs
new file mode 100644
index 000000000..fe825434d
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/Data/ComplianceNetworkZone.cs
@@ -0,0 +1,181 @@
+using FWO.Api.Client;
+using NetTools;
+using Newtonsoft.Json;
+using System.Net;
+using System.Text.Json.Serialization;
+
+namespace FWO.Api.Data
+{
+ public class ComplianceNetworkZone
+ {
+ [JsonProperty("id"), JsonPropertyName("id")]
+ public int Id { get; set; } = -1;
+
+ [JsonProperty("name"), JsonPropertyName("name")]
+ public string Name { get; set; } = "";
+
+ [JsonProperty("description"), JsonPropertyName("description")]
+ public string Description { get; set; } = "";
+
+ [JsonProperty("ip_ranges", ItemConverterType = typeof(IpAddressRangeJsonTypeConverter)), JsonPropertyName("ip_ranges")]
+ public IPAddressRange[] IPRanges { get; set; } = new IPAddressRange[0];
+
+ [JsonProperty("super_network_zone"), JsonPropertyName("super_network_zone")]
+ public ComplianceNetworkZone? Superzone { get; set; } = null;
+
+ [JsonProperty("sub_network_zones"), JsonPropertyName("sub_network_zones")]
+ public ComplianceNetworkZone[] Subzones { get; set; } = new ComplianceNetworkZone[0];
+
+ [JsonProperty("network_zone_communication_sources", ItemConverterType = typeof(WrapperConverter),
+ ItemConverterParameters = new object[] { "from_network_zone" }), JsonPropertyName("network_zone_communication_sources")]
+ public ComplianceNetworkZone[] AllowedCommunicationSources { get; set; } = new ComplianceNetworkZone[0];
+
+ [JsonProperty("network_zone_communication_destinations", ItemConverterType = typeof(WrapperConverter),
+ ItemConverterParameters = new object[] { "to_network_zone" }), JsonPropertyName("network_zone_communication_destinations")]
+ public ComplianceNetworkZone[] AllowedCommunicationDestinations { get; set; } = new ComplianceNetworkZone[0];
+
+
+ public bool CommunicationAllowedFrom(ComplianceNetworkZone from)
+ {
+ return AllowedCommunicationSources.Contains(from);
+ }
+
+ public bool CommunicationAllowedTo(ComplianceNetworkZone to)
+ {
+ return AllowedCommunicationDestinations.Contains(to);
+ }
+
+ public bool OverlapExists(List ipRanges, List> unseenIpRanges)
+ {
+ bool result = false;
+
+ for (int i = 0; i < IPRanges.Length; i++)
+ {
+ for (int j = 0; j < ipRanges.Count; j++)
+ {
+ if (OverlapExists(IPRanges[i], ipRanges[j]))
+ {
+ result = true;
+ RemoveOverlap(unseenIpRanges[j], IPRanges[i]);
+ }
+ }
+ }
+ return result;
+ }
+
+        /// <summary>
+        /// Checks if IP range a and b overlap.
+        /// </summary>
+        /// <param name="a">First IP range</param>
+        /// <param name="b">Second IP range</param>
+        /// <returns>True, if IP ranges overlap, false otherwise.</returns>
+ private bool OverlapExists(IPAddressRange a, IPAddressRange b)
+ {
+ return IpToUint(a.Begin) <= IpToUint(b.End) && IpToUint(b.Begin) <= IpToUint(a.End);
+ }
+
+ private void RemoveOverlap(List ranges, IPAddressRange toRemove)
+ {
+ for (int i = 0; i < ranges.Count; i++)
+ {
+ if (OverlapExists(ranges[i], toRemove))
+ {
+ if (IpToUint(toRemove.Begin) <= IpToUint(ranges[i].Begin) && IpToUint(toRemove.End) >= IpToUint(ranges[i].End))
+ {
+ // Complete overlap, remove the entire range
+ ranges.RemoveAt(i);
+ i--;
+ }
+ else if (IpToUint(toRemove.Begin) <= IpToUint(ranges[i].Begin))
+ {
+ // Overlap on the left side, update the start
+ ranges[i].Begin = UintToIp(IpToUint(toRemove.End) + 1);
+ }
+ else if (IpToUint(toRemove.End) >= IpToUint(ranges[i].End))
+ {
+ // Overlap on the right side, update the end
+ ranges[i].End = UintToIp(IpToUint(toRemove.Begin) - 1);
+ }
+ else
+ {
+ // Overlap in the middle, split the range
+ // begin..remove.begin-1
+ IPAddress end = ranges[i].End;
+ ranges[i].End = UintToIp(IpToUint(toRemove.Begin) - 1);
+ // remove.end+1..end
+ ranges.Insert(i, new IPAddressRange(UintToIp(IpToUint(toRemove.End) + 1), end));
+ i++;
+ }
+ }
+ }
+ }
+
+ private uint IpToUint(IPAddress ipAddress)
+ {
+ byte[] bytes = ipAddress.GetAddressBytes();
+
+ // flip big-endian(network order) to little-endian
+ if (BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(bytes);
+ }
+
+ return BitConverter.ToUInt32(bytes, 0);
+ }
+
+ private IPAddress UintToIp(uint ipAddress)
+ {
+ byte[] bytes = BitConverter.GetBytes(ipAddress);
+
+ // flip big-endian(network order) to little-endian
+ if (BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(bytes);
+ }
+
+ return new IPAddress(bytes);
+ }
+
+ public object Clone()
+ {
+ IPAddressRange[] ipRangesClone = new IPAddressRange[IPRanges.Length];
+ for (int i = 0; i < IPRanges.Length; i++)
+ {
+ ipRangesClone[i] = new IPAddressRange(IPRanges[i].Begin, IPRanges[i].End);
+ }
+
+ return new ComplianceNetworkZone()
+ {
+ Id = Id,
+ Superzone = (ComplianceNetworkZone?)Superzone?.Clone(),
+ Name = Name,
+ Description = Description,
+ IPRanges = ipRangesClone,
+ Subzones = CloneArray(Subzones),
+ AllowedCommunicationSources = CloneArray(AllowedCommunicationSources),
+ AllowedCommunicationDestinations = CloneArray(AllowedCommunicationDestinations)
+ };
+ }
+
+ private static ComplianceNetworkZone[] CloneArray(ComplianceNetworkZone[] array)
+ {
+ ComplianceNetworkZone[] arrayClone = new ComplianceNetworkZone[array.Length];
+ for (int i = 0; i < array.Length; i++)
+ {
+ arrayClone[i] = (ComplianceNetworkZone)array[i].Clone();
+ }
+ return arrayClone;
+ }
+
+ public override bool Equals(object? obj)
+ {
+ if (obj == null) return false;
+ return ((ComplianceNetworkZone)obj).Id == Id;
+ }
+
+ public override int GetHashCode()
+ {
+ return HashCode.Combine(Id);
+ }
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs b/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs
index de33eee3a..3bfbd4b4c 100644
--- a/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/DeviceFilter.cs
@@ -41,6 +41,11 @@ public class DeviceFilter
public DeviceFilter()
{}
+ public DeviceFilter(DeviceFilter devFilter)
+ {
+ Managements = devFilter.Managements;
+ }
+
public DeviceFilter(List devIds)
{
ManagementSelect dummyManagement = new ManagementSelect();
diff --git a/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs b/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs
index 3f29d0d6c..ad221bab7 100644
--- a/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/DeviceType.cs
@@ -5,8 +5,6 @@
using Newtonsoft.Json;
namespace FWO.Api.Data
{
- [Newtonsoft.Json.JsonConverter(typeof(NoTypeConverterJsonConverter))]
- [TypeConverter(typeof(JsonStringConverter))]
public class DeviceType
{
[JsonProperty("id"), JsonPropertyName("id")]
@@ -22,15 +20,12 @@ public class DeviceType
public string Manufacturer { get; set; } = "";
[JsonProperty("isPureRoutingDevice"), JsonPropertyName("isPureRoutingDevice")]
- public Boolean IsPureRoutingDevice { get; set; }
+ public bool IsPureRoutingDevice { get; set; }
[JsonProperty("isManagement"), JsonPropertyName("isManagement")]
- public Boolean IsManagement { get; set; }
+ public bool IsManagement { get; set; }
- // [JsonProperty("predefinedObjects"), JsonPropertyName("predefinedObjects")]
- // public ??? PredefinedObjects { get; set; }
-
- public static List LegacyDevTypeList = new List
+ private static List LegacyDevTypeList = new List
{
2, // Netscreen 5.x-6.x
4, // FortiGateStandalone 5ff
@@ -40,13 +35,13 @@ public class DeviceType
8 // JUNOS 10-21
};
- public static Dictionary SupermanagerMap = new Dictionary
+ private static Dictionary SupermanagerMap = new Dictionary
{
// Mgmt -> Supermgmt
{ 11, 12 }, // FortiADOM 5ff -> FortiManager 5ff
{ 9, 13 } // Check Point R8x -> Check Point MDS R8x
};
- public static Dictionary SupermanagerGatewayMap = new Dictionary
+ private static Dictionary SupermanagerGatewayMap = new Dictionary
{
// Supermgmt -> Gateway
{ 12, 10}, // FortiManager 5ff-> FortiGate 5ff
@@ -55,16 +50,17 @@ public class DeviceType
{ 14, 16} // Cisco Firepower
};
- public static List CheckPointManagers = new List
+ private static List CheckPointManagers = new List
{
13, 9 // Check Point MDS R8x and Check Point R8x
};
- public static List FortiManagers = new List
+ private static List FortiManagers = new List
{
12 // FortiManager 5ff
};
+
public DeviceType()
{}
diff --git a/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs b/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs
index d1dbdf2b9..aeb0c16b1 100644
--- a/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/NetworkZone.cs
@@ -1,4 +1,6 @@
-using System.Text.Json.Serialization;
+using System.Net;
+using System.Text.Json.Serialization;
+using NetTools;
using Newtonsoft.Json;
namespace FWO.Api.Data
@@ -10,5 +12,6 @@ public class NetworkZone
[JsonProperty("zone_name"), JsonPropertyName("zone_name")]
public string Name { get; set; } = "";
+
}
}
diff --git a/roles/lib/files/FWO.Api.Client/Data/Recertification.cs b/roles/lib/files/FWO.Api.Client/Data/Recertification.cs
index 133cc5531..d10fa1576 100644
--- a/roles/lib/files/FWO.Api.Client/Data/Recertification.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/Recertification.cs
@@ -7,6 +7,8 @@ public class Recertification : RecertificationBase
{
[JsonProperty("owner"), JsonPropertyName("owner")]
public FwoOwner? FwoOwner { get; set; } = new FwoOwner();
- }
+ [JsonProperty("user_dn"), JsonPropertyName("user_dn")]
+ public string UserDn { get; set; } = "";
+ }
}
diff --git a/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs b/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs
index 70bd4446b..a3d2476e1 100644
--- a/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/ReportTemplate.cs
@@ -32,13 +32,14 @@ public class ReportTemplate
public ReportTemplate()
{}
- public ReportTemplate(string filter, DeviceFilter deviceFilter, int? reportType, TimeFilter timeFilter, RecertFilter recertFilter)
+ public ReportTemplate(string filter, DeviceFilter deviceFilter, int? reportType, TimeFilter timeFilter, RecertFilter recertFilter, UnusedFilter? unusedFilter)
{
Filter = filter;
ReportParams.DeviceFilter = deviceFilter;
ReportParams.ReportType = reportType;
ReportParams.TimeFilter = timeFilter;
ReportParams.RecertFilter = recertFilter;
+ ReportParams.UnusedFilter = unusedFilter ?? new UnusedFilter();
Detailed = false;
}
@@ -64,5 +65,9 @@ public class ReportParams
[JsonProperty("recert_filter"), JsonPropertyName("recert_filter")]
public RecertFilter RecertFilter { get; set; } = new RecertFilter();
+
+ [JsonProperty("unused_filter"), JsonPropertyName("unused_filter")]
+ public UnusedFilter UnusedFilter { get; set; } = new UnusedFilter();
+
}
}
diff --git a/roles/lib/files/FWO.Api.Client/Data/Rule.cs b/roles/lib/files/FWO.Api.Client/Data/Rule.cs
index 773e67690..9d69e7383 100644
--- a/roles/lib/files/FWO.Api.Client/Data/Rule.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/Rule.cs
@@ -83,10 +83,12 @@ public class Rule
[JsonProperty("matches"), JsonPropertyName("matches")]
public string IpMatch {get; set;} = "";
+ [JsonProperty("dev_id"), JsonPropertyName("dev_id")]
+ public int DeviceId { get; set; }
+
public int DisplayOrderNumber { get; set; }
public bool Certified { get; set; }
- public int DeviceId { get; set; }
public string DeviceName { get; set; } = "";
}
diff --git a/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs b/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs
index c906a0e37..a3b7a32f7 100644
--- a/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/RuleMetadata.cs
@@ -38,6 +38,9 @@ public class RuleMetadata
[JsonProperty("recertification"), JsonPropertyName("recertification")]
public List RuleRecertification { get; set; } = new List();
+ [JsonProperty("recert_history"), JsonPropertyName("recert_history")]
+ public List RecertHistory { get; set; } = new List();
+
public DateTime NextRecert { get; set; }
public string LastCertifierName { get; set; } = "";
diff --git a/roles/lib/files/FWO.Api.Client/Data/UiUser.cs b/roles/lib/files/FWO.Api.Client/Data/UiUser.cs
index e3889fb57..872b4d33f 100644
--- a/roles/lib/files/FWO.Api.Client/Data/UiUser.cs
+++ b/roles/lib/files/FWO.Api.Client/Data/UiUser.cs
@@ -38,8 +38,6 @@ public class UiUser
[JsonProperty("ldap_connection"), JsonPropertyName("ldap_connection")]
public UiLdapConnection LdapConnection { get; set;} = new UiLdapConnection();
- public string DefaultRole { get; set; } = "";
-
public List Roles { get; set; } = new List();
public string Jwt { get; set; } = "";
diff --git a/roles/lib/files/FWO.Api.Client/Data/UnusedFilter.cs b/roles/lib/files/FWO.Api.Client/Data/UnusedFilter.cs
new file mode 100644
index 000000000..83d65ed9b
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/Data/UnusedFilter.cs
@@ -0,0 +1,8 @@
+namespace FWO.Api.Data
+{
+ public class UnusedFilter
+ {
+ public int UnusedForDays = int.MaxValue;
+ public int CreationTolerance = 0;
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs b/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs
index 1a4dd93e5..c8685a538 100644
--- a/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs
+++ b/roles/lib/files/FWO.Api.Client/GraphQlApiConnection.cs
@@ -23,6 +23,7 @@ public class GraphQlApiConnection : ApiConnection
private GraphQLHttpClient graphQlClient;
private string? jwt;
+ private string prevRole = "";
private void Initialize(string ApiServerUri)
{
@@ -72,6 +73,39 @@ public override void SetRole(string role)
graphQlClient.HttpClient.DefaultRequestHeaders.Add("x-hasura-role", role);
}
+ public override void SetProperRole(System.Security.Claims.ClaimsPrincipal user, List targetRoleList)
+ {
+ try
+ {
+ prevRole = graphQlClient.HttpClient.DefaultRequestHeaders.GetValues("x-hasura-role")?.First() ?? "";
+ }
+ catch(Exception){}
+
+ // first look if user is already in one of the target roles
+ foreach(string role in targetRoleList)
+ {
+ if (user.IsInRole(role))
+ {
+ SetRole(role);
+ return;
+ }
+ }
+ // now look if user has a target role as allowed role
+ foreach(string role in targetRoleList)
+ {
+ if(user.Claims.FirstOrDefault(claim => claim.Type == "x-hasura-allowed-roles" && claim.Value == role) != null)
+ {
+ SetRole(role);
+ return;
+ }
+ }
+ }
+
+ public override void SwitchBack()
+ {
+ SetRole(prevRole);
+ }
+
///
/// Sends an APICall (query, mutation)
/// NB: SendQueryAsync always returns an array of objects (even if the result is a single element)
@@ -141,12 +175,12 @@ public override async Task SendQueryAsync(
}
}
- public override ApiSubscription GetSubscription(Action exceptionHandler, ApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null)
+ public override GraphQlApiSubscription GetSubscription(Action exceptionHandler, GraphQlApiSubscription.SubscriptionUpdate subscriptionUpdateHandler, string subscription, object? variables = null, string? operationName = null)
{
try
{
GraphQLRequest request = new GraphQLRequest(subscription, variables, operationName);
- return new ApiSubscription(this, graphQlClient, request, exceptionHandler, subscriptionUpdateHandler);
+ return new GraphQlApiSubscription(this, graphQlClient, request, exceptionHandler, subscriptionUpdateHandler);
}
catch (Exception exception)
{
@@ -154,5 +188,13 @@ public override ApiSubscription GetSubscription : ApiSubscription, IDisposable
+ {
+ public delegate void SubscriptionUpdate(SubscriptionResponseType reponse);
+ public event SubscriptionUpdate OnUpdate;
+
+ private IObservable> subscriptionStream;
+ private IDisposable subscription;
+ private readonly GraphQLHttpClient graphQlClient;
+ private readonly GraphQLRequest request;
+ private readonly Action internalExceptionHandler;
+
+ public GraphQlApiSubscription(ApiConnection apiConnection, GraphQLHttpClient graphQlClient, GraphQLRequest request, Action exceptionHandler, SubscriptionUpdate OnUpdate)
+ {
+ this.OnUpdate = OnUpdate;
+ this.graphQlClient = graphQlClient;
+ this.request = request;
+
+ // handle subscription terminating exceptions
+ internalExceptionHandler = (Exception exception) =>
+ {
+ // Case: Jwt expired
+ if (exception.Message.Contains("JWTExpired"))
+ {
+ // Quit subscription by throwing exception.
+ // This does NOT lead to a real thrown exception within the application but is instead handled by the graphql library
+ throw exception;
+ }
+ exceptionHandler(exception);
+ };
+
+ CreateSubscription();
+
+ apiConnection.OnAuthHeaderChanged += ApiConnectionOnAuthHeaderChanged;
+ }
+
+ private void CreateSubscription()
+ {
+ Log.WriteDebug("API", $"Creating API subscription {request.OperationName}.");
+ subscriptionStream = graphQlClient.CreateSubscriptionStream(request, internalExceptionHandler);
+ Log.WriteDebug("API", "API subscription created.");
+
+ subscription = subscriptionStream.Subscribe(response =>
+ {
+ if (ApiConstants.UseSystemTextJsonSerializer)
+ {
+ JsonElement.ObjectEnumerator responseObjectEnumerator = response.Data.EnumerateObject();
+ responseObjectEnumerator.MoveNext();
+ SubscriptionResponseType returnValue = JsonSerializer.Deserialize(responseObjectEnumerator.Current.Value.GetRawText()) ??
+ throw new Exception($"Could not convert result from Json to {nameof(SubscriptionResponseType)}.\nJson: {responseObjectEnumerator.Current.Value.GetRawText()}"); ;
+ OnUpdate(returnValue);
+ }
+ else
+ {
+ try
+ {
+                        // If response.Data == null -> Jwt expired - connection was closed
+ // Leads to this method getting called again
+ if (response.Data == null)
+ {
+ // Terminate subscription
+ subscription.Dispose();
+ }
+ else
+ {
+ JObject data = (JObject)response.Data;
+ JProperty prop = (JProperty)(data.First ?? throw new Exception($"Could not retrieve unique result attribute from Json.\nJson: {response.Data}"));
+ JToken result = prop.Value;
+ SubscriptionResponseType returnValue = result.ToObject() ?? throw new Exception($"Could not convert result from Json to {typeof(SubscriptionResponseType)}.\nJson: {response.Data}");
+ OnUpdate(returnValue);
+ }
+ }
+ catch (Exception ex)
+ {
+ Log.WriteError("GraphQL Subscription", "Subscription lead to exception", ex);
+ throw;
+ }
+ }
+ });
+ }
+
+ private void ApiConnectionOnAuthHeaderChanged(object? sender, string jwt)
+ {
+ subscription.Dispose();
+ CreateSubscription();
+ }
+
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ subscription.Dispose();
+ }
+ }
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/JsonCustomConverters.cs b/roles/lib/files/FWO.Api.Client/JsonCustomConverters.cs
new file mode 100644
index 000000000..06dd7624f
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/JsonCustomConverters.cs
@@ -0,0 +1,82 @@
+using NetTools;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Net;
+using System.Text;
+using System.Text.Json.Nodes;
+using System.Threading.Tasks;
+
+namespace FWO.Api.Client
+{
+ public class WrapperConverter : JsonConverter
+ {
+ private readonly string wrappedObjectName = "";
+
+ public WrapperConverter(string wrappedObjectName)
+ {
+ this.wrappedObjectName = wrappedObjectName;
+ }
+
+ public override bool CanConvert(Type objectType) => typeof(ValueType).IsAssignableFrom(objectType);
+
+ public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
+ {
+ // Load the JSON as a JObject
+ JObject jsonObject = JObject.Load(reader);
+
+ // Check if the "wrappedObjectName" property exists
+ if (jsonObject.TryGetValue(wrappedObjectName, out JToken? wrappedObjectToken))
+ {
+ // Deserialize the wrapped object
+ return wrappedObjectToken.ToObject(serializer);
+ }
+
+ // Deserialize the wrapper object otherwise
+ return jsonObject.ToObject(serializer);
+ }
+
+ public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
+ {
+ // Wrap the object with a property named "wrappedObjectName"
+ JObject jsonObject = new JObject
+ {
+ { wrappedObjectName, value == null ? null : JToken.FromObject(value, serializer) }
+ };
+
+ // Write the JSON
+ jsonObject.WriteTo(writer);
+ }
+ }
+
+ public class IpAddressRangeJsonTypeConverter : JsonConverter
+ {
+ public override IPAddressRange ReadJson(JsonReader reader, Type objectType, IPAddressRange? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ // Load the JSON as a JObject
+ JObject jsonObject = JObject.Load(reader);
+ // Deserialize the IP address range based on the properties ip_range_start and ip_range_end
+ IPAddress start = IPAddress.Parse((jsonObject.GetValue("ip_range_start")?.ToObject() ?? throw new ArgumentNullException("ip_range_start")).Replace("/32", ""));
+ IPAddress end = IPAddress.Parse((jsonObject.GetValue("ip_range_end")?.ToObject() ?? throw new ArgumentNullException("ip_range_start")).Replace("/32", ""));
+ return new IPAddressRange(start, end);
+ }
+
+ public override void WriteJson(JsonWriter writer, IPAddressRange? value, JsonSerializer serializer)
+ {
+ if (value != null)
+ {
+ // Create a JSON JObject
+ JObject result = new JObject
+ {
+ { "ip_range_start", value.Begin.ToString() },
+ { "ip_range_end", value.Begin.ToString() }
+ };
+
+ result.WriteTo(writer);
+ }
+ }
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/JsonStringConverter.cs b/roles/lib/files/FWO.Api.Client/JsonStringConverter.cs
deleted file mode 100644
index 06d64253c..000000000
--- a/roles/lib/files/FWO.Api.Client/JsonStringConverter.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-using FWO.Api.Data;
-using Newtonsoft.Json;
-using Newtonsoft.Json.Serialization;
-using System;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Globalization;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace FWO.Api.Client
-{
- public class JsonStringConverter : TypeConverter
- {
- public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType)
- {
- return sourceType == typeof(string) || base.CanConvertFrom(context, sourceType);
- }
-
- public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value)
- {
- if (value is string stringValue)
- {
- return JsonConvert.DeserializeObject(stringValue);
- }
- else
- {
- return base.ConvertFrom(context, culture, value);
- }
- }
-
- public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType)
- {
- if (destinationType == null || destinationType == typeof(string))
- {
- return JsonConvert.SerializeObject(value);
- }
- else
- {
- return base.ConvertTo(context, culture, value, destinationType);
- }
- }
- }
-
- public class NoTypeConverterJsonConverter : JsonConverter
- {
- static readonly IContractResolver resolver = new NoTypeConverterContractResolver();
-
- class NoTypeConverterContractResolver : DefaultContractResolver
- {
- protected override JsonContract CreateContract(Type objectType)
- {
- if (typeof(T).IsAssignableFrom(objectType))
- {
- var contract = this.CreateObjectContract(objectType);
- contract.Converter = null; // Also null out the converter to prevent infinite recursion.
- return contract;
- }
- return base.CreateContract(objectType);
- }
- }
-
- public override bool CanConvert(Type objectType)
- {
- return typeof(T).IsAssignableFrom(objectType);
- }
-
- public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
- {
- return JsonSerializer.CreateDefault(new JsonSerializerSettings { ContractResolver = resolver }).Deserialize(reader, objectType);
- }
-
- public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
- {
- JsonSerializer.CreateDefault(new JsonSerializerSettings { ContractResolver = resolver }).Serialize(writer, value);
- }
- }
-}
diff --git a/roles/lib/files/FWO.Api.Client/Queries/ComplianceQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/ComplianceQueries.cs
new file mode 100644
index 000000000..a31b87581
--- /dev/null
+++ b/roles/lib/files/FWO.Api.Client/Queries/ComplianceQueries.cs
@@ -0,0 +1,30 @@
+using FWO.Logging;
+
+namespace FWO.Api.Client.Queries
+{
+ public class ComplianceQueries : Queries
+ {
+ public static readonly string addNetworkZone;
+ public static readonly string deleteNetworkZone;
+ public static readonly string getNetworkZones;
+ public static readonly string updateNetworkZones;
+ public static readonly string modifyNetworkZoneCommunication;
+
+ static ComplianceQueries()
+ {
+ try
+ {
+ addNetworkZone = File.ReadAllText(QueryPath + "compliance/addNetworkZone.graphql");
+ deleteNetworkZone = File.ReadAllText(QueryPath + "compliance/deleteNetworkZone.graphql");
+ getNetworkZones = File.ReadAllText(QueryPath + "compliance/getNetworkZones.graphql");
+ updateNetworkZones = File.ReadAllText(QueryPath + "compliance/updateNetworkZone.graphql");
+ modifyNetworkZoneCommunication = File.ReadAllText(QueryPath + "compliance/updateNetworkZoneCommunication.graphql");
+ }
+ catch (Exception exception)
+ {
+ Log.WriteError("Initialize Compliance Queries", "Api compliance queries could not be loaded.", exception);
+ Environment.Exit(-1);
+ }
+ }
+ }
+}
diff --git a/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs
index f84d43478..5eafa3d52 100644
--- a/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs
+++ b/roles/lib/files/FWO.Api.Client/Queries/DeviceQueries.cs
@@ -22,7 +22,6 @@ public class DeviceQueries : Queries
public static readonly string updateDevice;
public static readonly string changeDeviceState;
public static readonly string deleteDevice;
- public static readonly string getImportStatus;
public static readonly string deleteImport;
public static readonly string getCredentials;
public static readonly string getCredentialsWithoutSecrets;
@@ -58,9 +57,8 @@ static DeviceQueries()
updateDevice = File.ReadAllText(QueryPath + "device/updateDevice.graphql");
changeDeviceState = File.ReadAllText(QueryPath + "device/changeDeviceState.graphql");
deleteDevice = File.ReadAllText(QueryPath + "device/deleteDevice.graphql");
- getImportStatus = File.ReadAllText(QueryPath + "device/getImportStatus.graphql");
deleteImport = File.ReadAllText(QueryPath + "device/deleteImport.graphql");
-
+
getCredentials = File.ReadAllText(QueryPath + "device/getCredentials.graphql") + " "
+ File.ReadAllText(QueryPath + "device/fragments/importCredentials.graphql");
getCredentialsWithoutSecrets = File.ReadAllText(QueryPath + "device/getCredentialsWithoutSecrets.graphql") + " "
diff --git a/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs
index fc8631815..debe8d00e 100644
--- a/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs
+++ b/roles/lib/files/FWO.Api.Client/Queries/MonitorQueries.cs
@@ -8,6 +8,7 @@ public class MonitorQueries : Queries
public static readonly string getLogEntrys;
public static readonly string addUiLogEntry;
public static readonly string getUiLogEntrys;
+ public static readonly string getAllUiLogEntrys;
public static readonly string getImportLogEntrys;
public static readonly string addAlert;
public static readonly string getOpenAlerts;
@@ -18,6 +19,7 @@ public class MonitorQueries : Queries
public static readonly string addAutodiscoveryLogEntry;
public static readonly string getAutodiscoveryLogEntrys;
public static readonly string getDailyCheckLogEntrys;
+ public static readonly string getImportStatus;
static MonitorQueries()
@@ -29,6 +31,7 @@ static MonitorQueries()
addUiLogEntry = File.ReadAllText(QueryPath + "monitor/addUiLogEntry.graphql");
getUiLogEntrys = File.ReadAllText(QueryPath + "monitor/getUiLogEntrys.graphql");
+ getAllUiLogEntrys = File.ReadAllText(QueryPath + "monitor/getAllUiLogEntrys.graphql");
getImportLogEntrys = File.ReadAllText(QueryPath + "monitor/getImportLogEntrys.graphql");
@@ -39,6 +42,8 @@ static MonitorQueries()
acknowledgeAlert = File.ReadAllText(QueryPath + "monitor/acknowledgeAlert.graphql");
subscribeAlertChanges = File.ReadAllText(QueryPath + "monitor/subscribeAlertChanges.graphql");
+ getImportStatus = File.ReadAllText(QueryPath + "monitor/getImportStatus.graphql");
+
addAutodiscoveryLogEntry = File.ReadAllText(QueryPath + "monitor/addAutodiscoveryLogEntry.graphql");
getAutodiscoveryLogEntrys = File.ReadAllText(QueryPath + "monitor/getAutodiscoveryLogEntrys.graphql");
getDailyCheckLogEntrys = File.ReadAllText(QueryPath + "monitor/getDailyCheckLogEntrys.graphql");
diff --git a/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs b/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs
index fa649d4fe..c222c3986 100644
--- a/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs
+++ b/roles/lib/files/FWO.Api.Client/Queries/ReportQueries.cs
@@ -28,11 +28,14 @@ public class ReportQueries : Queries
public static readonly string getRelevantImportIdsAtTime;
public static readonly string statisticsReportCurrent;
+ public static readonly string subscribeGeneratedReportsChanges;
public static readonly string getGeneratedReport;
public static readonly string getGeneratedReports;
public static readonly string deleteGeneratedReport;
public static readonly string addGeneratedReport;
+ public static readonly string getUsageDataCount;
+
static ReportQueries()
{
try
@@ -53,10 +56,12 @@ static ReportQueries()
editReportTemplate = File.ReadAllText(QueryPath + "report/editReportTemplate.graphql");
deleteReportTemplate = File.ReadAllText(QueryPath + "report/deleteReportTemplate.graphql");
subscribeReportScheduleChanges = File.ReadAllText(QueryPath + "report/subscribeReportScheduleChanges.graphql");
+ subscribeGeneratedReportsChanges = File.ReadAllText(QueryPath + "report/subscribeGeneratedReportsChanges.graphql");
getGeneratedReports = File.ReadAllText(QueryPath + "report/getGeneratedReports.graphql");
getGeneratedReport = File.ReadAllText(QueryPath + "report/getGeneratedReport.graphql");
deleteGeneratedReport = File.ReadAllText(QueryPath + "report/deleteGeneratedReport.graphql");
addGeneratedReport = File.ReadAllText(QueryPath + "report/addGeneratedReport.graphql");
+ getUsageDataCount = File.ReadAllText(QueryPath + "report/getUsageDataCount.graphql");
}
catch (Exception exception)
{
diff --git a/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs b/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs
index 923d6cbc5..c818d0415 100644
--- a/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs
+++ b/roles/lib/files/FWO.Config.Api/Data/ConfigData.cs
@@ -34,6 +34,12 @@ public class ConfigData : ICloneable
[JsonProperty("autoFillRightSidebar"), JsonPropertyName("autoFillRightSidebar")]
public bool AutoFillRightSidebar { get; set; } = false;
+ [JsonProperty("unusedTolerance"), JsonPropertyName("unusedTolerance")]
+ public int UnusedTolerance { get; set; } = 400;
+
+ [JsonProperty("creationTolerance"), JsonPropertyName("creationTolerance")]
+ public int CreationTolerance { get; set; } = 90;
+
[JsonProperty("dataRetentionTime"), JsonPropertyName("dataRetentionTime")]
public int DataRetentionTime { get; set; } = 731;
@@ -55,6 +61,19 @@ public class ConfigData : ICloneable
[JsonProperty("fwApiElementsPerFetch"), JsonPropertyName("fwApiElementsPerFetch")]
public int FwApiElementsPerFetch { get; set; } = 150;
+ [JsonProperty("impChangeNotifyRecipients"), JsonPropertyName("impChangeNotifyRecipients")]
+ public string ImpChangeNotifyRecipients { get; set; } = "";
+
+ [JsonProperty("impChangeNotifySubject"), JsonPropertyName("impChangeNotifySubject")]
+ public string ImpChangeNotifySubject { get; set; } = "";
+
+ [JsonProperty("impChangeNotifyBody"), JsonPropertyName("impChangeNotifyBody")]
+ public string ImpChangeNotifyBody { get; set; } = "";
+
+ [JsonProperty("impChangeNotifyActive"), JsonPropertyName("impChangeNotifyActive")]
+ public bool ImpChangeNotifyActive { get; set; } = false;
+
+
[JsonProperty("recertificationPeriod"), JsonPropertyName("recertificationPeriod")]
public int RecertificationPeriod { get; set; } = 365;
@@ -131,7 +150,7 @@ public class ConfigData : ICloneable
public string EmailServerAddress { get; set; } = "";
[JsonProperty("emailPort"), JsonPropertyName("emailPort")]
- public int EmailPort { get; set; } = 25;
+ public int EmailPort { get; set; }
[JsonProperty("emailTls"), JsonPropertyName("emailTls")]
public EmailEncryptionMethod EmailTls { get; set; } = EmailEncryptionMethod.None;
diff --git a/roles/lib/files/FWO.Config.Api/UserConfig.cs b/roles/lib/files/FWO.Config.Api/UserConfig.cs
index fc9807fde..16fd11830 100644
--- a/roles/lib/files/FWO.Config.Api/UserConfig.cs
+++ b/roles/lib/files/FWO.Config.Api/UserConfig.cs
@@ -136,7 +136,7 @@ public override string GetText(string key)
}
}
- public string Convert(string rawText)
+ private string Convert(string rawText)
{
string plainText = System.Web.HttpUtility.HtmlDecode(rawText);
@@ -153,7 +153,7 @@ public string Convert(string rawText)
while (cont)
{
begin = plainText.IndexOf(startLink, index);
- if (begin > 0)
+ if (begin >= 0)
{
end = plainText.IndexOf("\"", begin + startLink.Length);
if (end > 0)
diff --git a/roles/lib/files/FWO.Logging/Log.cs b/roles/lib/files/FWO.Logging/Log.cs
index 7001a7284..16fc7da66 100644
--- a/roles/lib/files/FWO.Logging/Log.cs
+++ b/roles/lib/files/FWO.Logging/Log.cs
@@ -1,6 +1,4 @@
-using System;
-using System.Diagnostics;
-using System.Linq;
+using System.Diagnostics;
using System.Reflection;
using System.Runtime.CompilerServices;
@@ -17,61 +15,62 @@ static Log()
Task.Factory.StartNew(async () =>
{
// log switch - log file locking
- bool logOwned = false;
+ bool logOwnedByExternal = false;
Stopwatch stopwatch = new Stopwatch();
while (true)
{
try
{
+ // Open file
using FileStream file = await GetFile(lockFilePath);
- // read file content
+ // Read file content
using StreamReader reader = new StreamReader(file);
string lockFileContent = (await reader.ReadToEndAsync()).Trim();
- // REQUESTED - lock was requested by log swap process
+ // Forcefully release lock after timeout
+ if (logOwnedByExternal && stopwatch.ElapsedMilliseconds > 10_000)
+ {
+ using StreamWriter writer = new StreamWriter(file);
+ await writer.WriteLineAsync("FORCEFULLY RELEASED");
+ stopwatch.Reset();
+ semaphore.Release();
+ logOwnedByExternal = false;
+ }
// GRANTED - lock was granted by us
- // RELEASED - lock was released by log swap process
- if (lockFileContent.EndsWith("GRANTED"))
+ else if (lockFileContent.EndsWith("GRANTED"))
{
// Request lock if it is not already requested by us
// (in case of restart with log already granted)
- if (!logOwned)
+ if (!logOwnedByExternal)
{
semaphore.Wait();
stopwatch.Restart();
- logOwned = true;
- }
- // Forcefully release lock after timeout
- else if (stopwatch.ElapsedMilliseconds > 10 * 1000)
- {
- using StreamWriter writer = new StreamWriter(file);
- await writer.WriteLineAsync("FORCEFULLY RELEASED");
- stopwatch.Reset();
- semaphore.Release();
- logOwned = false;
+ logOwnedByExternal = true;
}
}
- if (lockFileContent.EndsWith("REQUESTED"))
+ // REQUESTED - lock was requested by log swap process
+ else if (lockFileContent.EndsWith("REQUESTED"))
{
// only request lock if it is not already requested by us
- if (!logOwned)
+ if (!logOwnedByExternal)
{
semaphore.Wait();
stopwatch.Restart();
- logOwned = true;
+ logOwnedByExternal = true;
}
using StreamWriter writer = new StreamWriter(file);
await writer.WriteLineAsync("GRANTED");
}
- if (lockFileContent.EndsWith("RELEASED"))
+ // RELEASED - lock was released by log swap process
+ else if (lockFileContent.EndsWith("RELEASED"))
{
// only release lock if it was formerly requested by us
- if (logOwned)
+ if (logOwnedByExternal)
{
stopwatch.Reset();
semaphore.Release();
- logOwned = false;
+ logOwnedByExternal = false;
}
}
}
@@ -116,12 +115,12 @@ public static void WriteWarning(string Title, string Text, [CallerMemberName] st
WriteLog("Warning", Title, Text, callerName, callerFile, callerLineNumber, ConsoleColor.DarkYellow);
}
- public static void WriteError(string Title, string? Text = null, Exception? Error = null, [CallerMemberName] string callerName = "", [CallerFilePath] string callerFile = "", [CallerLineNumber] int callerLineNumber = 0)
+ public static void WriteError(string Title, string? Text = null, Exception? Error = null, string? User = null, string? Role = null, [CallerMemberName] string callerName = "", [CallerFilePath] string callerFile = "", [CallerLineNumber] int callerLineNumber = 0)
{
string DisplayText =
- (Text != null ?
- $"{Text}"
- : "") +
+ (User != null ? $"User: {User}, " : "") +
+ (Role != null ? $"Role: {Role}, " : "") +
+ (Text != null ? $"{Text}" : "") +
(Error != null ?
"\n ---\n" +
$"Exception thrown: \n {Error?.GetType().Name} \n" +
diff --git a/roles/lib/files/FWO.Mail/MailerMailKit.cs b/roles/lib/files/FWO.Mail/MailerMailKit.cs
index abcbcecfc..ac5f5852d 100644
--- a/roles/lib/files/FWO.Mail/MailerMailKit.cs
+++ b/roles/lib/files/FWO.Mail/MailerMailKit.cs
@@ -28,7 +28,17 @@ public class MailData
public string? Body { get; }
- public MailData(List to, string subject, string? body = null, string? from = null, string? displayName = null, string? replyTo = null, string? replyToName = null, List? bcc = null, List? cc = null)
+ public MailData(
+ List to,
+ string subject,
+ string? body = null,
+ string? from = null,
+ string? displayName = null,
+ string? replyTo = null,
+ string? replyToName = null,
+ List? bcc = null,
+ List? cc = null
+ )
{
// Receiver
To = to;
@@ -40,7 +50,7 @@ public MailData(List to, string subject, string? body = null, string? fr
DisplayName = displayName;
ReplyTo = replyTo;
ReplyToName = replyToName;
-
+
// Content
Subject = subject;
Body = body;
@@ -61,7 +71,12 @@ public MailKitMailer(EmailConnection emailConn)
EmailConn = emailConn;
}
- public async Task SendAsync(MailData mailData, EmailConnection emailConn, CancellationToken ct = default, bool mailFormatHtml = false)
+ public async Task SendAsync(
+ MailData mailData,
+ EmailConnection emailConn,
+ CancellationToken ct = default,
+ bool mailFormatHtml = false
+ )
{
try
{
@@ -83,7 +98,7 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn,
mail.To.Add(MailboxAddress.Parse(mailAddress));
// Set Reply to if specified in mail data
- if(!string.IsNullOrEmpty(mailData.ReplyTo))
+ if (!string.IsNullOrEmpty(mailData.ReplyTo))
mail.ReplyTo.Add(new MailboxAddress(mailData.ReplyToName, mailData.ReplyTo));
// BCC
@@ -91,7 +106,9 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn,
if (mailData.Bcc != null)
{
// Get only addresses where value is not null or with whitespace. x = value of address
- foreach (string mailAddress in mailData.Bcc.Where(x => !string.IsNullOrWhiteSpace(x)))
+ foreach (
+ string mailAddress in mailData.Bcc.Where(x => !string.IsNullOrWhiteSpace(x))
+ )
mail.Bcc.Add(MailboxAddress.Parse(mailAddress.Trim()));
}
@@ -99,7 +116,9 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn,
// Check if a CC address was supplied in the request
if (mailData.Cc != null)
{
- foreach (string mailAddress in mailData.Cc.Where(x => !string.IsNullOrWhiteSpace(x)))
+ foreach (
+ string mailAddress in mailData.Cc.Where(x => !string.IsNullOrWhiteSpace(x))
+ )
mail.Cc.Add(MailboxAddress.Parse(mailAddress.Trim()));
}
#endregion
@@ -125,13 +144,30 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn,
switch (emailConn.Encryption)
{
case EmailEncryptionMethod.None:
- await smtp.ConnectAsync(emailConn.ServerAddress, emailConn.Port, SecureSocketOptions.None, ct);
+ await smtp.ConnectAsync(
+ emailConn.ServerAddress,
+ emailConn.Port,
+ SecureSocketOptions.None,
+ ct
+ );
break;
case EmailEncryptionMethod.StartTls:
- await smtp.ConnectAsync(emailConn.ServerAddress, emailConn.Port, SecureSocketOptions.StartTls, ct);
+ smtp.ServerCertificateValidationCallback = (s, c, h, e) => true; //accept all SSL certificates
+ await smtp.ConnectAsync(
+ emailConn.ServerAddress,
+ emailConn.Port,
+ SecureSocketOptions.StartTls,
+ ct
+ );
break;
case EmailEncryptionMethod.Tls:
- await smtp.ConnectAsync(emailConn.ServerAddress, emailConn.Port, SecureSocketOptions.SslOnConnect, ct);
+ smtp.ServerCertificateValidationCallback = (s, c, h, e) => true; //accept all SSL certificates
+ await smtp.ConnectAsync(
+ emailConn.ServerAddress,
+ emailConn.Port,
+ SecureSocketOptions.SslOnConnect,
+ ct
+ );
break;
}
if (emailConn.User != null && emailConn.User != "")
@@ -140,11 +176,10 @@ public async Task SendAsync(MailData mailData, EmailConnection emailConn,
}
await smtp.SendAsync(mail, ct);
await smtp.DisconnectAsync(true, ct);
-
+
#endregion
return true;
-
}
catch (Exception)
{
diff --git a/roles/lib/files/FWO.Middleware.Client/JwtReader.cs b/roles/lib/files/FWO.Middleware.Client/JwtReader.cs
index 70e63f6cc..c3d9b5252 100644
--- a/roles/lib/files/FWO.Middleware.Client/JwtReader.cs
+++ b/roles/lib/files/FWO.Middleware.Client/JwtReader.cs
@@ -122,5 +122,12 @@ public TimeSpan TimeUntilExpiry()
return jwt.ValidTo - DateTime.UtcNow;
}
+
+ public string GetRole()
+ {
+ if (jwt == null)
+ throw new ArgumentNullException(nameof(jwt), "Jwt was not validated yet.");
+ return jwt.Claims.FirstOrDefault(claim => claim.Type == "role")?.Value ?? "";
+ }
}
}
diff --git a/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs b/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs
index 8794475c0..08ed76d5c 100644
--- a/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs
+++ b/roles/lib/files/FWO.Middleware.Client/MiddlewareClient.cs
@@ -10,8 +10,9 @@
namespace FWO.Middleware.Client
{
- public class MiddlewareClient
+ public class MiddlewareClient : IDisposable
{
+ private bool disposed = false;
private RestClient restClient;
readonly string middlewareServerUri;
@@ -241,5 +242,26 @@ public async Task> DeleteTenant(TenantDeleteParameters parame
request.AddJsonBody(parameters);
return await restClient.ExecuteAsync(request);
}
+
+ protected virtual void Dispose(bool disposing)
+ {
+ if (disposed) return;
+ if (disposing)
+ {
+ restClient.Dispose();
+ disposed = true;
+ }
+ }
+
+ public void Dispose()
+ {
+ Dispose(true);
+ GC.SuppressFinalize(this);
+ }
+
+ ~ MiddlewareClient()
+ {
+ Dispose(false);
+ }
}
}
diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs
index 6fe55e0fb..8e989e6d2 100644
--- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs
+++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilter.cs
@@ -1,6 +1,3 @@
-using System.ComponentModel;
-using System.Net;
-using FWO.Logging;
using FWO.Report.Filter.Exceptions;
namespace FWO.Report.Filter.Ast
@@ -89,104 +86,5 @@ protected string AddVariable(DynGraphqlQuery query, string name, TokenKind
public abstract void ConvertToSemanticType();
- //public void ConvertToSemanticType()
- //{
- // TypeConverter converter = TypeDescriptor.GetConverter(this.GetType());
- // if (converter.CanConvertFrom(this.GetType()))
- // {
- // try
- // {
- // object convertedValue = converter.ConvertFrom(this) ?? throw new NullReferenceException("Error while converting: converted value is null");
- // SemanticValue = (SemanticType)convertedValue ?? throw new NullReferenceException($"Error while converting: value could not be converted to semantic type: {typeof(SemanticType)}");
- // }
- // catch (SemanticException)
- // {
- // throw;
- // }
- // catch (Exception ex)
- // {
- // throw new SemanticException($"Filter could not be converted to expected semantic type {typeof(SemanticType)}: {ex.Message}", Value.Position);
- // }
- // }
- // else
- // {
- // throw new NotSupportedException($"Internal error: TypeConverter does not support conversion from {this.GetType()} to {typeof(SemanticType)}");
- // }
- //}
-
- //public override void Extract(ref DynGraphqlQuery query)
- //{
- // switch (Name.Kind)
- //
-
- // // "xy" and "FullText=xy" are the same filter
- // case TokenKind.FullText:
- // case TokenKind.Value:
- // ExtractFullTextFilter(query);
- // break;
- // case TokenKind.ReportType:
- // ExtractReportTypeFilter(query);
- // break;
- // case TokenKind.Source:
- // ExtractSourceFilter(query);
- // break;
- // case TokenKind.Destination:
- // ExtractDestinationFilter(query);
- // break;
- // case TokenKind.Action:
- // ExtractActionFilter(query);
- // break;
- // case TokenKind.Service:
- // ExtractServiceFilter(query);
- // break;
- // case TokenKind.DestinationPort:
- // ExtractDestinationPortFilter(query);
- // break;
- // case TokenKind.Protocol:
- // ExtractProtocolFilter(query);
- // break;
- // case TokenKind.Management:
- // ExtractManagementFilter(query);
- // break;
- // case TokenKind.Gateway:
- // ExtractGatewayFilter(query);
- // break;
- // case TokenKind.Remove:
- // ExtractRemoveFilter(query);
- // break;
- // case TokenKind.RecertDisplay:
- // ExtractRecertDisplayFilter(query); //, (int)(SemanticValue as int?)!);
- // break;
- // case TokenKind.Time:
- // ExtractTimeFilter(query);
- // break;
- // default:
- // throw new NotSupportedException($"### Compiler Error: Found unexpected and unsupported filter token: \"{Name}\" ###");
- // }
- //}
-
- //private static string SetQueryOpString(Token @operator, Token filter, string value)
- //{
- // string operation;
- // switch (@operator.Kind)
- // {
- // case TokenKind.EQ:
- // if (filter.Kind == TokenKind.Time || filter.Kind == TokenKind.DestinationPort)
- // operation = "_eq";
- // else if ((filter.Kind == TokenKind.Source && IsCidr(value)) || filter.Kind == TokenKind.DestinationPort)
- // operation = "_eq";
- // else if (filter.Kind == TokenKind.Management && int.TryParse(value, out int _))
- // operation = "_eq";
- // else
- // operation = "_ilike";
- // break;
- // case TokenKind.NEQ:
- // operation = "_nilike";
- // break;
- // default:
- // throw new Exception("### Parser Error: Expected Operator Token (and thought there is one) ###");
- // }
- // return operation;
- //}
}
}
diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs
index 201b3082a..0de31b27e 100644
--- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs
+++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterDateTimeRange.cs
@@ -1,10 +1,4 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace FWO.Report.Filter.Ast
+namespace FWO.Report.Filter.Ast
{
internal class AstNodeFilterDateTimeRange : AstNodeFilter
{
@@ -66,118 +60,5 @@ private DynGraphqlQuery ExtractLastHitFilter(DynGraphqlQuery query, ReportType r
}
return query;
}
-
- //private DynGraphqlQuery ExtractTimeFilter(DynGraphqlQuery query)
- //{
- // switch (query.ReportType)
- // {
- // case ReportType.Rules:
- // case ReportType.Statistics:
- // case ReportType.NatRules:
- // switch (Operator.Kind)
- // {
- // case TokenKind.EQ:
- // case TokenKind.EEQ:
- // query.ruleWhereStatement +=
- // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " +
- // $"importControlByRuleLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}";
- // query.nwObjWhereStatement +=
- // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " +
- // $"importControlByObjLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}";
- // query.svcObjWhereStatement +=
- // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " +
- // $"importControlBySvcLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}";
- // query.userObjWhereStatement +=
- // $"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " +
- // $"importControlByUserLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}";
- // query.ReportTime = Value.Text;
- // break;
- // default:
- // throw new SemanticException($"Unexpected operator token. Expected equals token.", Operator.Position);
- // }
- // break;
- // case ReportType.Changes:
- // switch (Operator.Kind)
- // {
- // case TokenKind.EQ:
- // case TokenKind.EEQ:
- // case TokenKind.GRT:
- // case TokenKind.LSS:
- // (string start, string stop) = ResolveTimeRange(Value.Text);
- // query.QueryVariables["start"] = start;
- // query.QueryVariables["stop"] = stop;
- // query.QueryParameters.Add("$start: timestamp! ");
- // query.QueryParameters.Add("$stop: timestamp! ");
-
- // query.ruleWhereStatement += $@"
- // _and: [
- // {{ import_control: {{ stop_time: {{ _gte: $start }} }} }}
- // {{ import_control: {{ stop_time: {{ _lte: $stop }} }} }}
- // ]
- // change_type_id: {{ _eq: 3 }}
- // security_relevant: {{ _eq: true }}";
- // break;
- // default:
- // throw new SemanticException($"Unexpected operator token.", Operator.Position);
- // }
- // break;
- // default:
- // Log.WriteError("Filter", $"Unexpected report type found: {query.ReportType}");
- // break;
- // }
- // // todo: deal with time ranges for changes report type
- // return query;
- //}
-
- //private (string, string) ResolveTimeRange(string timeRange)
- //{
- // string start;
- // string stop;
- // //string currentTime = (string)DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");
- // string currentYear = (string)DateTime.Now.ToString("yyyy");
- // string currentMonth = (string)DateTime.Now.ToString("MM");
- // string currentDay = (string)DateTime.Now.ToString("dd");
- // DateTime startOfCurrentMonth = new DateTime(Convert.ToInt16(currentYear), Convert.ToInt16(currentMonth), 1);
- // DateTime startOfNextMonth = startOfCurrentMonth.AddMonths(1);
- // DateTime startOfPrevMonth = startOfCurrentMonth.AddMonths(-1);
-
- // switch (timeRange)
- // {
- // // todo: add today, yesterday, this week, last week
- // case "last year":
- // start = $"{(Convert.ToInt16(currentYear) - 1)}-01-01";
- // stop = $"{Convert.ToInt16(currentYear)}-01-01";
- // break;
- // case "this year":
- // start = $"{Convert.ToInt16(currentYear)}-01-01";
- // stop = $"{Convert.ToInt16(currentYear) + 1}-01-01";
- // break;
- // case "this month":
- // start = startOfCurrentMonth.ToString("yyyy-MM-dd");
- // stop = startOfNextMonth.ToString("yyyy-MM-dd");
- // break;
- // case "last month":
- // start = startOfPrevMonth.ToString("yyyy-MM-dd");
- // stop = startOfCurrentMonth.ToString("yyyy-MM-dd");
- // break;
- // default:
- // string[] times = timeRange.Split('/');
- // if (times.Length == 2)
- // {
- // start = Convert.ToDateTime(times[0]).ToString("yyyy-MM-dd HH:mm:ss");
- // if (times[1].Trim().Length < 11)
- // {
- // times[1] += " 23:59:59";
- // }
- // stop = Convert.ToDateTime(times[1]).ToString("yyyy-MM-dd HH:mm:ss");
- // }
- // else
- // throw new SyntaxException($"Error: wrong time range format.", Value.Position); // Unexpected token
- // // we have some hard coded string positions here which we should get rid off
- // // how can we access the tokens[position].Position information here?
- // break;
- // }
- // return (start, stop);
- //}
}
}
diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs
index 9e2180a12..2e5611e3d 100644
--- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs
+++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterInt.cs
@@ -1,9 +1,4 @@
using FWO.Report.Filter.Exceptions;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
namespace FWO.Report.Filter.Ast
{
@@ -39,6 +34,9 @@ public override void Extract(ref DynGraphqlQuery query, ReportType? reportType)
case TokenKind.Owner:
ExtractOwnerFilter(query);
break;
+ case TokenKind.Unused:
+ ExtractUnusedFilter(query);
+ break;
default:
break;
}
@@ -65,6 +63,15 @@ private DynGraphqlQuery ExtractOwnerFilter(DynGraphqlQuery query)
query.ruleWhereStatement += $"owner: {{ {ExtractOperator()}: ${QueryVarName} }}";
return query;
}
-
+
+ private DynGraphqlQuery ExtractUnusedFilter(DynGraphqlQuery query)
+ {
+ string QueryVarName = AddVariable(query, "cut", Operator.Kind, DateTime.Now.AddDays(-semanticValue));
+ query.ruleWhereStatement += $@"rule_metadatum: {{_or: [
+ {{_and: [{{rule_last_hit: {{_is_null: false}} }}, {{rule_last_hit: {{_lte: ${QueryVarName} }} }} ] }},
+ {{ rule_last_hit: {{_is_null: true}} }}
+ ]}}";
+ return query;
+ }
}
}
diff --git a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs
index 2ac1e59e1..feb603858 100644
--- a/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs
+++ b/roles/lib/files/FWO.Report.Filter/Ast/AstNodeFilterReportType.cs
@@ -13,10 +13,14 @@ public override void ConvertToSemanticType()
{
"rules" or "rule" => ReportType.Rules,
"resolvedrules" or "resolvedrule" => ReportType.ResolvedRules,
+ "resolvedrulestech" or "resolvedruletech" => ReportType.ResolvedRulesTech,
+ "unusedrules" or "unusedrule" => ReportType.UnusedRules,
"statistics" or "statistic" => ReportType.Statistics,
"changes" or "change" => ReportType.Changes,
"resolvedchanges" or "resolvedchange" => ReportType.ResolvedChanges,
+ "resolvedchangestech" or "resolvedchangetech" => ReportType.ResolvedChangesTech,
"natrules" or "nat_rules" => ReportType.NatRules,
+ "recertifications" or "recertification" => ReportType.Recertification,
_ => throw new SemanticException($"Unexpected report type found", Value.Position)
};
}
diff --git a/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs b/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs
index 71dd5bab6..fc5382cc0 100644
--- a/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs
+++ b/roles/lib/files/FWO.Report.Filter/DynGraphqlQuery.cs
@@ -21,9 +21,8 @@ public class DynGraphqlQuery
{
" $limit: Int ",
" $offset: Int ",
- " $mgmId: [Int!]",
- " $relevantImportId: bigint"
- }; // $mgmId and $relevantImporId are only needed for time based filtering
+ " $mgmId: [Int!]" // TODO: verify whether this parameter is needed for change reports
+ };
public string ReportTimeString { get; set; } = "";
public List RelevantManagementIds { get; set; } = new List();
@@ -60,18 +59,6 @@ private static void SetDeviceFilter(ref DynGraphqlQuery query, DeviceFilter? dev
query.ruleWhereStatement += "}]}, ";
}
}
- private static List GetDeviceFilterAsList(DeviceFilter? deviceFilter)
- {
- List devIdList = new List();
- if (deviceFilter != null)
- {
- foreach (ManagementSelect mgmt in deviceFilter.Managements)
- foreach (DeviceSelect dev in mgmt.Devices)
- if (dev.Selected == true)
- devIdList.Add(dev.Id);
- }
- return devIdList;
- }
private static void SetTimeFilter(ref DynGraphqlQuery query, TimeFilter? timeFilter, ReportType? reportType, RecertFilter recertFilter)
{
@@ -85,6 +72,8 @@ private static void SetTimeFilter(ref DynGraphqlQuery query, TimeFilter? timeFil
case ReportType.ResolvedRulesTech:
case ReportType.Statistics:
case ReportType.NatRules:
+ case ReportType.UnusedRules:
+ query.QueryParameters.Add("$relevantImportId: bigint ");
query.ruleWhereStatement +=
$"import_control: {{ control_id: {{_lte: $relevantImportId }} }}, " +
$"importControlByRuleLastSeen: {{ control_id: {{_gte: $relevantImportId }} }}";
@@ -109,6 +98,7 @@ private static void SetTimeFilter(ref DynGraphqlQuery query, TimeFilter? timeFil
query.QueryVariables["stop"] = stop;
query.QueryParameters.Add("$start: timestamp! ");
query.QueryParameters.Add("$stop: timestamp! ");
+ query.QueryParameters.Add("$relevantImportId: bigint ");
query.ruleWhereStatement += $@"
_and: [
@@ -214,32 +204,40 @@ private static (string, string) ResolveTimeRange(TimeFilter timeFilter)
return (start, stop);
}
-
- private static void SetRecertFilter(ref DynGraphqlQuery query, RecertFilter? recertFilter, DeviceFilter deviceFilter)
+ private static void SetRecertFilter(ref DynGraphqlQuery query, RecertFilter? recertFilter)
{
- // bool first = true;
-
- List deviceIdFilter = GetDeviceFilterAsList(deviceFilter);
if (recertFilter != null)
{
- // query.QueryParameters.Add("$ownerIds: [Int!] ");
- // query.QueryParameters.Add("$refdate1: Timestamp!");
-
// setting owner filter:
if (recertFilter.RecertOwnerList.Count > 0)
{
- // query.QueryVariables["ownerIds"] = recertFilter.RecertOwnerList;
query.QueryParameters.Add("$ownerWhere: owner_bool_exp");
query.QueryVariables["ownerWhere"] = new {id = new {_in = recertFilter.RecertOwnerList}};
}
else
- { // if no ownerIds are set in the filter, return all recerts
+ {
+ // if no ownerIds are set in the filter, return all recerts
query.QueryParameters.Add("$ownerWhere: owner_bool_exp");
query.QueryVariables["ownerWhere"] = new {id = new {}};
}
}
}
+ private static void SetUnusedFilter(ref DynGraphqlQuery query, UnusedFilter? unusedFilter)
+ {
+ if (unusedFilter != null)
+ {
+ query.QueryParameters.Add("$cut: timestamp");
+ query.QueryParameters.Add("$tolerance: timestamp");
+ query.QueryVariables["cut"] = DateTime.Now.AddDays(-unusedFilter.UnusedForDays);
+ query.QueryVariables["tolerance"] = DateTime.Now.AddDays(-unusedFilter.CreationTolerance);
+ query.ruleWhereStatement += $@"{{rule_metadatum: {{_or: [
+ {{_and: [{{rule_last_hit: {{_is_null: false}} }}, {{rule_last_hit: {{_lte: $cut}} }} ] }},
+ {{_and: [{{rule_last_hit: {{_is_null: true}} }}, {{rule_created: {{_lte: $tolerance}} }} ] }}
+ ]}} }}";
+ }
+ }
+
private static void SetFixedFilters(ref DynGraphqlQuery query, ReportTemplate reportParams)
{
// leave out all header texts
@@ -253,7 +251,11 @@ private static void SetFixedFilters(ref DynGraphqlQuery query, ReportTemplate re
SetTimeFilter(ref query, reportParams.ReportParams.TimeFilter, (ReportType)(reportParams.ReportParams.ReportType ?? throw new Exception("No report type set")), reportParams.ReportParams.RecertFilter);
if (reportParams.ReportParams.ReportType!= null && (ReportType)reportParams.ReportParams.ReportType==ReportType.Recertification)
{
- SetRecertFilter(ref query, reportParams.ReportParams.RecertFilter, reportParams.ReportParams.DeviceFilter);
+ SetRecertFilter(ref query, reportParams.ReportParams.RecertFilter);
+ }
+ if (reportParams.ReportParams.ReportType!= null && (ReportType)reportParams.ReportParams.ReportType==ReportType.UnusedRules)
+ {
+ SetUnusedFilter(ref query, reportParams.ReportParams.UnusedFilter);
}
}
@@ -275,6 +277,15 @@ public static DynGraphqlQuery GenerateQuery(ReportTemplate filter, AstNode? ast)
string paramString = string.Join(" ", query.QueryParameters.ToArray());
+ string mgmtWhereString = $@"where: {{ hide_in_gui: {{_eq: false }}
+ mgm_id: {{_in: $mgmId }}
+ stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }}
+ }} order_by: {{ mgm_name: asc }}";
+
+ string devWhereString = $@"where: {{ hide_in_gui: {{_eq: false }},
+ stm_dev_typ: {{is_pure_routing_device:{{_eq:false}} }}
+ }} order_by: {{ dev_name: asc }}";
+
if (((ReportType)(filter.ReportParams.ReportType ?? throw new Exception("No report type set"))).IsResolvedReport())
filter.Detailed = true;
@@ -282,141 +293,120 @@ public static DynGraphqlQuery GenerateQuery(ReportTemplate filter, AstNode? ast)
{
case ReportType.Statistics:
query.FullQuery = Queries.compact($@"
- query statisticsReport ({paramString})
- {{
- management(
- where: {{
- hide_in_gui: {{_eq: false }}
- mgm_id: {{_in: $mgmId }}
- stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }}
- }}
- order_by: {{ mgm_name: asc }}
- )
- {{
- name: mgm_name
- id: mgm_id
- objects_aggregate(where: {{ {query.nwObjWhereStatement} }}) {{ aggregate {{ count }} }}
- services_aggregate(where: {{ {query.svcObjWhereStatement} }}) {{ aggregate {{ count }} }}
- usrs_aggregate(where: {{ {query.userObjWhereStatement} }}) {{ aggregate {{ count }} }}
- rules_aggregate(where: {{ {query.ruleWhereStatement} }}) {{ aggregate {{ count }} }}
- devices( where: {{ hide_in_gui: {{_eq: false }}, stm_dev_typ: {{is_pure_routing_device:{{_eq:false}} }} }} order_by: {{ dev_name: asc }} )
+ query statisticsReport ({paramString})
+ {{
+ management({mgmtWhereString})
{{
- name: dev_name
- id: dev_id
+ name: mgm_name
+ id: mgm_id
+ objects_aggregate(where: {{ {query.nwObjWhereStatement} }}) {{ aggregate {{ count }} }}
+ services_aggregate(where: {{ {query.svcObjWhereStatement} }}) {{ aggregate {{ count }} }}
+ usrs_aggregate(where: {{ {query.userObjWhereStatement} }}) {{ aggregate {{ count }} }}
rules_aggregate(where: {{ {query.ruleWhereStatement} }}) {{ aggregate {{ count }} }}
+ devices({devWhereString})
+ {{
+ name: dev_name
+ id: dev_id
+ rules_aggregate(where: {{ {query.ruleWhereStatement} }}) {{ aggregate {{ count }} }}
+ }}
}}
}}
- }}");
+ ");
break;
case ReportType.Rules:
case ReportType.ResolvedRules:
case ReportType.ResolvedRulesTech:
+ case ReportType.UnusedRules:
query.FullQuery = Queries.compact($@"
- {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)}
-
- query rulesReport ({paramString})
- {{
- management( where:
- {{
- mgm_id: {{_in: $mgmId }},
- hide_in_gui: {{_eq: false }}
- stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }}
- }} order_by: {{ mgm_name: asc }} )
+ {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)}
+ query rulesReport ({paramString})
+ {{
+ management({mgmtWhereString})
{{
id: mgm_id
name: mgm_name
- devices ( where: {{ hide_in_gui: {{_eq: false }} }} order_by: {{ dev_name: asc }} )
+ devices ({devWhereString})
+ {{
+ id: dev_id
+ name: dev_name
+ rules(
+ limit: $limit
+ offset: $offset
+ where: {{ access_rule: {{_eq: true}} {query.ruleWhereStatement} }}
+ order_by: {{ rule_num_numeric: asc }} )
{{
- id: dev_id
- name: dev_name
- rules(
- limit: $limit
- offset: $offset
- where: {{ access_rule: {{_eq: true}} {query.ruleWhereStatement} }}
- order_by: {{ rule_num_numeric: asc }} )
- {{
- mgm_id: mgm_id
- ...{(filter.Detailed ? "ruleDetails" : "ruleOverview")}
- }}
- }}
+ mgm_id: mgm_id
+ {((ReportType)filter.ReportParams.ReportType == ReportType.UnusedRules ? "rule_metadatum { rule_last_hit }" : "")}
+ ...{(filter.Detailed ? "ruleDetails" : "ruleOverview")}
+ }}
+ }}
}}
- }}");
+ }}
+ ");
break;
case ReportType.Recertification:
- // remove Query Parameter relevant import id
- var itemToRemove = query.QueryParameters.Single(r => r == " $relevantImportId: bigint");
- query.QueryParameters.Remove(itemToRemove);
- paramString = string.Join(" ", query.QueryParameters.ToArray());
-
- query.FullQuery = Queries.compact($@"{RecertQueries.ruleOpenRecertFragments}
- query rulesCertReport({paramString}) {{
- management(
- where: {{
- mgm_id: {{ _in: $mgmId }}
- hide_in_gui: {{ _eq: false }}
- stm_dev_typ: {{
- dev_typ_is_multi_mgmt: {{ _eq: false }}
- is_pure_routing_device: {{ _eq: false }}
- }}
- }}
- order_by: {{ mgm_name: asc }}
- ) {{
- id: mgm_id
- name: mgm_name
- devices(
- where: {{ hide_in_gui: {{ _eq: false }} }}
- order_by: {{ dev_name: asc }}
- ) {{
- id: dev_id
- name: dev_name
- rules(
- where: {{
- rule_metadatum: {{ recertifications_aggregate: {{ count: {{ filter: {{ _and: [{{owner: $ownerWhere}}, {{recert_date: {{_is_null: true}}}}, {{next_recert_date: {{_lte: $refdate1}}}}]}}, predicate: {{_gt: 0}}}}}}}}
- active:{{ _eq:true }}
- {query.ruleWhereStatement}
- }}
- limit: $limit
- offset: $offset
- order_by: {{ rule_num_numeric: asc }}
- ) {{
- mgm_id: mgm_id
- ...ruleOpenCertOverview
+ query.FullQuery = Queries.compact($@"
+ {RecertQueries.ruleOpenRecertFragments}
+ query rulesCertReport({paramString})
+ {{
+ management({mgmtWhereString})
+ {{
+ id: mgm_id
+ name: mgm_name
+ devices({devWhereString})
+ {{
+ id: dev_id
+ name: dev_name
+ rules(
+ where: {{
+ rule_metadatum: {{ recertifications_aggregate: {{ count: {{ filter: {{ _and: [{{owner: $ownerWhere}}, {{recert_date: {{_is_null: true}}}}, {{next_recert_date: {{_lte: $refdate1}}}}]}}, predicate: {{_gt: 0}}}}}}}}
+ active:{{ _eq:true }}
+ {query.ruleWhereStatement}
+ }}
+ limit: $limit
+ offset: $offset
+ order_by: {{ rule_num_numeric: asc }}
+ )
+ {{
+ mgm_id: mgm_id
+ ...ruleOpenCertOverview
+ }}
}}
}}
}}
- }}");
+ ");
break;
case ReportType.Changes:
case ReportType.ResolvedChanges:
case ReportType.ResolvedChangesTech:
query.FullQuery = Queries.compact($@"
- {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)}
-
- query changeReport({paramString}) {{
- management(where: {{ hide_in_gui: {{_eq: false }} stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} }} order_by: {{mgm_name: asc}})
+ {(filter.Detailed ? RuleQueries.ruleDetailsForReportFragments : RuleQueries.ruleOverviewFragments)}
+ query changeReport({paramString})
{{
- id: mgm_id
- name: mgm_name
- devices (where: {{ hide_in_gui: {{_eq: false}} stm_dev_typ:{{is_pure_routing_device:{{_eq:false}} }} }}, order_by: {{dev_name: asc}} )
+ management(where: {{ hide_in_gui: {{_eq: false }} stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} }} order_by: {{mgm_name: asc}})
{{
- id: dev_id
- name: dev_name
- changelog_rules(
- offset: $offset
- limit: $limit
- where: {{
- _or:[
- {{_and: [{{change_action:{{_eq:""I""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}]}},
- {{_and: [{{change_action:{{_eq:""D""}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}},
- {{_and: [{{change_action:{{_eq:""C""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}}
- ]
- {query.ruleWhereStatement}
- }}
- order_by: {{ control_id: asc }}
- )
+ id: mgm_id
+ name: mgm_name
+ devices ({devWhereString})
+ {{
+ id: dev_id
+ name: dev_name
+ changelog_rules(
+ offset: $offset
+ limit: $limit
+ where: {{
+ _or:[
+ {{_and: [{{change_action:{{_eq:""I""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}]}},
+ {{_and: [{{change_action:{{_eq:""D""}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}},
+ {{_and: [{{change_action:{{_eq:""C""}}}}, {{rule: {{access_rule:{{_eq:true}}}}}}, {{ruleByOldRuleId: {{access_rule:{{_eq:true}}}}}}]}}
+ ]
+ {query.ruleWhereStatement}
+ }}
+ order_by: {{ control_id: asc }}
+ )
{{
import: import_control {{ time: stop_time }}
change_action
@@ -437,30 +427,30 @@ query changeReport({paramString}) {{
case ReportType.NatRules:
query.FullQuery = Queries.compact($@"
- {(filter.Detailed ? RuleQueries.natRuleDetailsForReportFragments : RuleQueries.natRuleOverviewFragments)}
-
- query natRulesReport ({paramString})
- {{
- management( where: {{ mgm_id: {{_in: $mgmId }}, hide_in_gui: {{_eq: false }} stm_dev_typ: {{dev_typ_is_multi_mgmt: {{_eq: false}} is_pure_routing_device: {{_eq: false}} }} }} order_by: {{ mgm_name: asc }} )
+ {(filter.Detailed ? RuleQueries.natRuleDetailsForReportFragments : RuleQueries.natRuleOverviewFragments)}
+ query natRulesReport ({paramString})
+ {{
+ management({mgmtWhereString})
{{
id: mgm_id
name: mgm_name
- devices ( where: {{ hide_in_gui: {{_eq: false }} stm_dev_typ:{{is_pure_routing_device:{{_eq:false}} }} }} order_by: {{ dev_name: asc }} )
- {{
- id: dev_id
- name: dev_name
- rules(
- limit: $limit
- offset: $offset
- where: {{ nat_rule: {{_eq: true}}, ruleByXlateRule: {{}} {query.ruleWhereStatement} }}
- order_by: {{ rule_num_numeric: asc }} )
- {{
- mgm_id: mgm_id
- ...{(filter.Detailed ? "natRuleDetails" : "natRuleOverview")}
- }}
- }}
+ devices ({devWhereString})
+ {{
+ id: dev_id
+ name: dev_name
+ rules(
+ limit: $limit
+ offset: $offset
+ where: {{ nat_rule: {{_eq: true}}, ruleByXlateRule: {{}} {query.ruleWhereStatement} }}
+ order_by: {{ rule_num_numeric: asc }} )
+ {{
+ mgm_id: mgm_id
+ ...{(filter.Detailed ? "natRuleDetails" : "natRuleOverview")}
+ }}
+ }}
}}
- }}");
+ }}
+ ");
break;
}
diff --git a/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs b/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs
index 93f3b9485..d4d650809 100644
--- a/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs
+++ b/roles/lib/files/FWO.Report.Filter/FilterTypes/ReportType.cs
@@ -10,7 +10,8 @@ public enum ReportType
ResolvedRulesTech = 6,
Recertification = 7,
ResolvedChanges = 8,
- ResolvedChangesTech = 9
+ ResolvedChangesTech = 9,
+ UnusedRules = 10
}
public static class ReportTypeGroups
@@ -24,6 +25,7 @@ public static bool IsRuleReport(this ReportType reportType)
case ReportType.ResolvedRulesTech:
case ReportType.NatRules:
case ReportType.Recertification:
+ case ReportType.UnusedRules:
return true;
default:
return false;
diff --git a/roles/lib/files/FWO.Report.Filter/Parser.cs b/roles/lib/files/FWO.Report.Filter/Parser.cs
index 6bcb231f9..133f64aeb 100644
--- a/roles/lib/files/FWO.Report.Filter/Parser.cs
+++ b/roles/lib/files/FWO.Report.Filter/Parser.cs
@@ -29,107 +29,20 @@ public Parser(List tokens)
private AstNode? ParseStart()
{
- // if (NextTokenExists())
- // {
- if (GetNextToken().Kind == TokenKind.Value)
- {
- // Left = new AstNodeFilterReportType()
- // {
- // Name = new Token(new Range(0, 0), "", TokenKind.ReportType),
- // Operator = new Token(new Range(0, 0), "", TokenKind.EEQ),
- // Value = new Token(new Range(0, 0), "rules", TokenKind.Value)
- // },
- // Connector = new Token(new Range(0, 0), "", TokenKind.And),
-
- // Right = ParseTime()
- // };
- // }
- // else
- // {
- // AstNodeConnector root = new AstNodeConnector
- // {
- // Left = new AstNodeFilterReportType()
- // {
- // Name = CheckToken(TokenKind.ReportType),
- // Operator = CheckToken(TokenKind.EQ, TokenKind.EEQ),
- return new AstNodeFilterString
- {
- Name = new Token(new Range(0, 0), "", TokenKind.Value),
- Operator = new Token(new Range(0, 0), "", TokenKind.EQ),
- Value = CheckToken(TokenKind.Value)
- };
- }
- else
+ if (GetNextToken().Kind == TokenKind.Value)
+ {
+ return new AstNodeFilterString
{
- return ParseOr();
- }
+ Name = new Token(new Range(0, 0), "", TokenKind.Value),
+ Operator = new Token(new Range(0, 0), "", TokenKind.EQ),
+ Value = CheckToken(TokenKind.Value)
+ };
}
- // }
-
- // private AstNode ParseTime()
- // {
- // if (NextTokenExists() == false || GetNextToken().Kind != TokenKind.Time)
- // {
- // AstNodeConnector root = new AstNodeConnector
- // {
- // Left = new AstNodeFilterDateTimeRange()
- // {
- // Name = new Token(new Range(0, 0), "", TokenKind.Time),
- // Operator = new Token(new Range(0, 0), "", TokenKind.EQ),
- // Value = new Token(new Range(0, 0), "now", TokenKind.Value) //DateTime.Now.ToString()
- // }
- // };
-
- // if (NextTokenExists())
- // {
- // root.Connector = new Token(new Range(0, 0), "", TokenKind.And);
- // root.Right = ParseStart();
- // return root;
- // }
- // else
- // {
- // return root.Left;
- // }
- // }
-
- // else // TokenKinde == Time
- // {
- // AstNodeConnector root = new AstNodeConnector
- // {
- // Left = new AstNodeFilterDateTimeRange()
- // {
- // Name = CheckToken(TokenKind.Time),
- // Operator = ParseOperator(),
- // Value = CheckToken(TokenKind.Value)
- // }
- // };
-
- // if (NextTokenExists() && GetNextToken().Kind == TokenKind.And)
- // {
- // root.Connector = CheckToken(TokenKind.And);
- // root.Right = ParseStart();
- // return root;
- // }
-
- // else
- // {
- // return root.Left;
- // }
- // }
- // }
-
- // private AstNode ParseStart()
- // {
- // if (GetNextToken().Kind == TokenKind.Value)
- // {
- // return new AstNodeFilterString
- // {
- // Name = new Token(new Range(0, 0), "", TokenKind.Value),
- // Operator = new Token(new Range(0, 0), "", TokenKind.EQ),
- // Value = CheckToken(TokenKind.Value)
- // };
- // }
- // }
+ else
+ {
+ return ParseOr();
+ }
+ }
private AstNode ParseOr()
{
@@ -237,7 +150,7 @@ TokenKind.Time or TokenKind.LastHit
TokenKind.ReportType
=> new AstNodeFilterReportType() { Name = Name, Operator = Operator, Value = Value },
- TokenKind.DestinationPort or TokenKind.RecertDisplay
+ TokenKind.DestinationPort or TokenKind.RecertDisplay or TokenKind.Unused
=> new AstNodeFilterInt() { Name = Name, Operator = Operator, Value = Value },
TokenKind.Source or TokenKind.Destination
@@ -257,7 +170,7 @@ private Token ParseFilterName()
return CheckToken(
TokenKind.LastHit, TokenKind.Owner, TokenKind.Destination, TokenKind.Source, TokenKind.Service, TokenKind.Protocol,
TokenKind.DestinationPort, TokenKind.Action, TokenKind.FullText, TokenKind.Gateway,
- TokenKind.Management, TokenKind.Remove, TokenKind.RecertDisplay, TokenKind.Disabled);
+ TokenKind.Management, TokenKind.Remove, TokenKind.RecertDisplay, TokenKind.Disabled, TokenKind.Unused);
}
private Token CheckToken(params TokenKind[] expectedTokenKinds)
diff --git a/roles/lib/files/FWO.Report.Filter/TokenKind.cs b/roles/lib/files/FWO.Report.Filter/TokenKind.cs
index 5c7b67fa2..1ac4c2b2c 100644
--- a/roles/lib/files/FWO.Report.Filter/TokenKind.cs
+++ b/roles/lib/files/FWO.Report.Filter/TokenKind.cs
@@ -22,6 +22,7 @@ public enum TokenKind
RecertDisplay,
FullText,
LastHit,
+ Unused,
BL, // (
BR, // )
And,
diff --git a/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs b/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs
index a5bc3fa14..0350b6620 100644
--- a/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs
+++ b/roles/lib/files/FWO.Report.Filter/TokenSyntax.cs
@@ -57,6 +57,12 @@ public static TokenSyntax Get(TokenKind tokenKind)
NoWhiteSpaceRequiered: new string[] { }
),
+ TokenKind.Unused => new TokenSyntax
+ (
+ WhiteSpaceRequiered: new string[] { "not-used-for-days", "unused", "unused-days", "not-used" },
+ NoWhiteSpaceRequiered: new string[] { }
+ ),
+
TokenKind.Source => new TokenSyntax
(
WhiteSpaceRequiered: new string[] { "source", "src" },
diff --git a/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs b/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs
index 2a9f1b701..d3151622e 100644
--- a/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs
+++ b/roles/lib/files/FWO.Report/Display/RuleDisplayHtml.cs
@@ -94,7 +94,7 @@ public string DisplayLastRecertifier(Rule rule)
protected string constructLink(string type, string symbol, long id, string name, OutputLocation location, int mgmtId, string style)
{
- string link = location == OutputLocation.export ? $"#" : $"{location.ToString()}#goto-report-m{mgmtId}-";
+ string link = location == OutputLocation.export ? $"#" : $"{location.ToString()}/generation#goto-report-m{mgmtId}-";
return $"{name}";
}
diff --git a/roles/lib/files/FWO.Report/ReportBase.cs b/roles/lib/files/FWO.Report/ReportBase.cs
index 7bc4963d7..2f4c839dd 100644
--- a/roles/lib/files/FWO.Report/ReportBase.cs
+++ b/roles/lib/files/FWO.Report/ReportBase.cs
@@ -228,6 +228,7 @@ public static ReportBase ConstructReport(ReportTemplate reportFilter, UserConfig
ReportType.ResolvedChangesTech => new ReportChanges(query, userConfig, repType),
ReportType.NatRules => new ReportNatRules(query, userConfig, repType),
ReportType.Recertification => new ReportRules(query, userConfig, repType),
+ ReportType.UnusedRules => new ReportRules(query, userConfig, repType),
_ => throw new NotSupportedException("Report Type is not supported."),
};
}
diff --git a/roles/lib/files/FWO.Report/ReportRules.cs b/roles/lib/files/FWO.Report/ReportRules.cs
index d8a307f30..2f929d003 100644
--- a/roles/lib/files/FWO.Report/ReportRules.cs
+++ b/roles/lib/files/FWO.Report/ReportRules.cs
@@ -15,47 +15,23 @@ public class ReportRules : ReportBase
{
public ReportRules(DynGraphqlQuery query, UserConfig userConfig, ReportType reportType) : base(query, userConfig, reportType) { }
- public bool GotReportedRuleIds { get; protected set; } = false;
private const int ColumnCount = 12;
- public async Task GetReportedRuleIds(ApiConnection apiConnection)
- {
- List relevantDevIds = DeviceFilter.ExtractSelectedDevIds(Managements);
- if (relevantDevIds.Count == 0)
- relevantDevIds = DeviceFilter.ExtractAllDevIds(Managements);
-
- for (int i = 0; i < Managements.Length; i++)
- {
- Dictionary ruleQueryVariables = new Dictionary();
- if (Managements[i].Import.ImportAggregate.ImportAggregateMax.RelevantImportId != null)
- {
- ruleQueryVariables["importId"] = Managements[i].Import.ImportAggregate.ImportAggregateMax.RelevantImportId!;
- ruleQueryVariables["devIds"] = relevantDevIds;
- Rule[] rules = await apiConnection.SendQueryAsync(RuleQueries.getRuleIdsOfImport, ruleQueryVariables);
- Managements[i].ReportedRuleIds = rules.Select(x => x.Id).Distinct().ToList();
- }
- }
- GotReportedRuleIds = true;
- }
public override async Task GetObjectsInReport(int objectsPerFetch, ApiConnection apiConnection, Func callback) // to be called when exporting
{
- // get rule ids per import (= management)
- if (!GotReportedRuleIds)
- await GetReportedRuleIds(apiConnection);
-
bool gotAllObjects = true; //whether the fetch count limit was reached during fetching
if (!GotObjectsInReport)
{
- for (int i = 0; i < Managements.Length; i++)
+ foreach (Management management in Managements)
{
- if (Managements[i].Import.ImportAggregate.ImportAggregateMax.RelevantImportId is not null)
+ if (management.Import.ImportAggregate.ImportAggregateMax.RelevantImportId is not null)
{
// set query variables for object query
var objQueryVariables = new Dictionary
{
- { "mgmIds", Managements[i].Id },
+ { "mgmIds", management.Id },
{ "limit", objectsPerFetch },
{ "offset", 0 },
};
@@ -78,9 +54,6 @@ public override async Task GetObjectsForManagementInReport(Dictionary m.Id == mid) ?? throw new ArgumentException("Given management id does not exist for this report");
- if (!GotReportedRuleIds)
- await GetReportedRuleIds(apiConnection);
-
objQueryVariables.Add("ruleIds", "{" + string.Join(", ", management.ReportedRuleIds) + "}");
objQueryVariables.Add("importId", management.Import.ImportAggregate.ImportAggregateMax.RelevantImportId!);
@@ -170,6 +143,7 @@ public override async Task Generate(int rulesPerFetch, ApiConnection apiConnecti
}
await callback(Managements);
}
+ SetReportedRuleIds();
}
public override string SetDescription()
@@ -190,6 +164,21 @@ public override string SetDescription()
return $"{managementCounter} {userConfig.GetText("managements")}, {deviceCounter} {userConfig.GetText("gateways")}, {ruleCounter} {userConfig.GetText("rules")}";
}
+ private void SetReportedRuleIds()
+ {
+ foreach (Management mgt in Managements)
+ {
+ foreach (Device dev in mgt.Devices.Where(d => (d.Rules != null && d.Rules.Length > 0)))
+ {
+ foreach (Rule rule in dev.Rules)
+ {
+ mgt.ReportedRuleIds.Add(rule.Id);
+ }
+ }
+ mgt.ReportedRuleIds = mgt.ReportedRuleIds.Distinct().ToList();
+ }
+ }
+
public override string ExportToCsv()
{
if (ReportType.IsResolvedReport())
@@ -367,6 +356,10 @@ private void appendRuleHeadlineHtml(ref StringBuilder report)
report.AppendLine($"