From 384b04338e2b2bbb862ad2bc0c47f198325acdb9 Mon Sep 17 00:00:00 2001 From: Elizabeth Thompson Date: Fri, 31 May 2024 11:48:09 -0700 Subject: [PATCH] use channel id with new slack api for file uploads --- superset/reports/notifications/slack.py | 84 ++++--- superset/tasks/slack_util.py | 62 ----- .../reports/commands_tests.py | 225 ++++++++++++------ 3 files changed, 207 insertions(+), 164 deletions(-) delete mode 100644 superset/tasks/slack_util.py diff --git a/superset/reports/notifications/slack.py b/superset/reports/notifications/slack.py index 9ecf8de6d5ef8..5aada81b3da3f 100644 --- a/superset/reports/notifications/slack.py +++ b/superset/reports/notifications/slack.py @@ -17,12 +17,13 @@ import logging from collections.abc import Sequence from io import IOBase -from typing import Union +from typing import List, Union import backoff import pandas as pd from flask import g from flask_babel import gettext as __ +from slack_sdk import WebClient from slack_sdk.errors import ( BotUserAccessError, SlackApiError, @@ -60,16 +61,24 @@ class SlackNotification(BaseNotification): # pylint: disable=too-few-public-met type = ReportRecipientType.SLACK - def _get_channel(self) -> str: + def _get_channels(self, client: WebClient) -> List[str]: """ Get the recipient's channel(s). - Note Slack SDK uses "channel" to refer to one or more - channels. Multiple channels are demarcated by a comma. - :returns: The comma separated list of channel(s) + :returns: A list of channel ids: "EID676L" """ recipient_str = json.loads(self._recipient.recipient_config_json)["target"] - return ",".join(get_email_address_list(recipient_str)) + channel_recipients = get_email_address_list(recipient_str) + + conversations_list_response = client.conversations_list( + types="public_channel,private_channel" + ) + + return [ + c["id"] + for c in conversations_list_response["channels"] + if c["name"] in channel_recipients + ] def _message_template(self, table: str = "") -> str: return __( @@ -115,15 +124,19 @@ def _get_body(self) -> str: # Flatten columns/index so they show up nicely in the table df.columns = [ - " ".join(str(name) for name in column).strip() - if isinstance(column, tuple) - else column + ( + " ".join(str(name) for name in column).strip() + if isinstance(column, tuple) + else column + ) for column in df.columns ] df.index = [ - " ".join(str(name) for name in index).strip() - if isinstance(index, tuple) - else index + ( + " ".join(str(name) for name in index).strip() + if isinstance(index, tuple) + else index + ) for index in df.index ] @@ -162,37 +175,44 @@ def _get_body(self) -> str: def _get_inline_files( self, - ) -> tuple[Union[str, None], Sequence[Union[str, IOBase, bytes]]]: + ) -> Sequence[Union[str, IOBase, bytes]]: if self._content.csv: - return ("csv", [self._content.csv]) + return [self._content.csv] if self._content.screenshots: - return ("png", self._content.screenshots) + return self._content.screenshots if self._content.pdf: - return ("pdf", [self._content.pdf]) - return (None, []) + return [self._content.pdf] + return [] @backoff.on_exception(backoff.expo, SlackApiError, factor=10, base=2, max_tries=5) @statsd_gauge("reports.slack.send") def send(self) -> None: - file_type, files = self._get_inline_files() - title = self._content.name - channel = self._get_channel() - body = self._get_body() global_logs_context = getattr(g, "logs_context", {}) or {} try: client = get_slack_client() + channels = self._get_channels(client) + + if channels == []: + raise NotificationParamException("No valid channel 
found") + + files = self._get_inline_files() + title = self._content.name + + body = self._get_body() + # files_upload returns SlackResponse as we run it in sync mode. - if files: - for file in files: - client.files_upload_v2( - channels=channel, - file=file, - initial_comment=body, - title=title, - filetype=file_type, - ) - else: - client.chat_postMessage(channel=channel, text=body) + for channel in channels: + if len(files): + for file in files: + client.files_upload_v2( + channel=channel, + file=file, + initial_comment=body, + title=title, + ) + else: + client.chat_postMessage(channel=channel, text=body) + logger.info( "Report sent to slack", extra={ diff --git a/superset/tasks/slack_util.py b/superset/tasks/slack_util.py deleted file mode 100644 index 5226f50dc69eb..0000000000000 --- a/superset/tasks/slack_util.py +++ /dev/null @@ -1,62 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""" -DEPRECATION NOTICE: this module is deprecated and will be removed on 2.0. -""" - -import logging -from io import IOBase -from typing import cast, Optional, Union - -import backoff -from flask import current_app -from slack_sdk import WebClient -from slack_sdk.errors import SlackApiError -from slack_sdk.web.slack_response import SlackResponse - -# Globals -logger = logging.getLogger("tasks.slack_util") - - -@backoff.on_exception(backoff.expo, SlackApiError, factor=10, base=2, max_tries=5) -def deliver_slack_msg( - slack_channel: str, - subject: str, - body: str, - file: Optional[Union[str, IOBase, bytes]], -) -> None: - config = current_app.config - token = config["SLACK_API_TOKEN"] - if callable(token): - token = token() - client = WebClient(token=token, proxy=config["SLACK_PROXY"]) - # files_upload returns SlackResponse as we run it in sync mode. 
- if file: - response = cast( - SlackResponse, - client.files_upload_v2( - channels=slack_channel, file=file, initial_comment=body, title=subject - ), - ) - assert response["file"], str(response) # the uploaded file - else: - response = cast( - SlackResponse, - client.chat_postMessage(channel=slack_channel, text=body), - ) - assert response["message"]["text"], str(response) - logger.info("Sent the report to the slack %s", slack_channel) diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py index e57912759a23e..65a73b0cbd4a9 100644 --- a/tests/integration_tests/reports/commands_tests.py +++ b/tests/integration_tests/reports/commands_tests.py @@ -624,7 +624,7 @@ def test_email_chart_report_schedule( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart.id, datetime.utcnow() + TEST_ID, create_report_email_chart.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule( @@ -676,7 +676,9 @@ def _screenshot_side_effect(user: User) -> Optional[bytes]: with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_alpha_owner.id, datetime.utcnow() + TEST_ID, + create_report_email_chart_alpha_owner.id, + datetime.now(timezone.utc), ).run() notification_targets = get_target_from_report_schedule( @@ -724,7 +726,9 @@ def test_email_chart_report_schedule_force_screenshot( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_force_screenshot.id, datetime.utcnow() + TEST_ID, + create_report_email_chart_force_screenshot.id, + datetime.now(timezone.utc), ).run() notification_targets = get_target_from_report_schedule( @@ -763,7 +767,7 @@ def test_email_chart_alert_schedule( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule(create_alert_email_chart) @@ -800,7 +804,7 @@ def test_email_chart_report_dry_run( app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = True with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart.id, datetime.utcnow() + TEST_ID, create_report_email_chart.id, datetime.now(timezone.utc) ).run() email_mock.assert_not_called() @@ -834,7 +838,7 @@ def test_email_chart_report_schedule_with_csv( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + TEST_ID, create_report_email_chart_with_csv.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule( @@ -889,7 +893,7 @@ def test_email_chart_report_schedule_with_csv_no_query_context( AsyncExecuteReportScheduleCommand( TEST_ID, create_report_email_chart_with_csv_no_query_context.id, - datetime.utcnow(), + datetime.now(timezone.utc), ).run() # verify that when query context is null we request a screenshot @@ -940,7 +944,7 @@ def test_email_chart_report_schedule_with_text( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_with_text.id, datetime.utcnow() + TEST_ID, create_report_email_chart_with_text.id, datetime.now(timezone.utc) ).run() # assert that the data is embedded correctly @@ -995,7 +999,7 @@ def 
test_email_chart_report_schedule_with_text( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_with_text.id, datetime.utcnow() + TEST_ID, create_report_email_chart_with_text.id, datetime.now(timezone.utc) ).run() # assert that the data is embedded correctly @@ -1044,7 +1048,7 @@ def test_email_dashboard_report_schedule( with freeze_time("2020-01-01T00:00:00Z"): with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock: AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_dashboard.id, datetime.utcnow() + TEST_ID, create_report_email_dashboard.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule( @@ -1079,7 +1083,7 @@ def test_email_dashboard_report_schedule_force_screenshot( AsyncExecuteReportScheduleCommand( TEST_ID, create_report_email_dashboard_force_screenshot.id, - datetime.utcnow(), + datetime.now(timezone.utc), ).run() notification_targets = get_target_from_report_schedule( @@ -1098,11 +1102,11 @@ def test_email_dashboard_report_schedule_force_screenshot( @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_slack_chart" ) -@patch("superset.utils.slack.WebClient.files_upload_v2") +@patch("superset.reports.notifications.slack.get_slack_client") @patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") def test_slack_chart_report_schedule( screenshot_mock, - file_upload_mock, + slack_client_mock, create_report_slack_chart, ): """ @@ -1110,19 +1114,28 @@ def test_slack_chart_report_schedule( """ # setup screenshot mock screenshot_mock.return_value = SCREENSHOT_FILE + notification_targets = get_target_from_report_schedule(create_report_slack_chart) + + channel_name = notification_targets[0] + channel_id = "channel_id" + slack_client_mock.return_value.conversations_list.return_value = { + "channels": [{"id": channel_id, "name": channel_name}] + } with freeze_time("2020-01-01T00:00:00Z"): with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock: AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart.id, datetime.utcnow() + TEST_ID, create_report_slack_chart.id, datetime.now(timezone.utc) ).run() - notification_targets = get_target_from_report_schedule( - create_report_slack_chart + assert ( + slack_client_mock.return_value.files_upload_v2.call_args[1]["channel"] + == channel_id + ) + assert ( + slack_client_mock.return_value.files_upload_v2.call_args[1]["file"] + == SCREENSHOT_FILE ) - - assert file_upload_mock.call_args[1]["channels"] == notification_targets[0] - assert file_upload_mock.call_args[1]["file"] == SCREENSHOT_FILE # Assert logs are correct assert_log(ReportState.SUCCESS) @@ -1162,7 +1175,7 @@ def test_slack_chart_report_schedule_with_errors( with pytest.raises(ReportScheduleClientErrorsException): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart.id, datetime.utcnow() + TEST_ID, create_report_slack_chart.id, datetime.now(timezone.utc) ).run() db.session.commit() @@ -1186,7 +1199,7 @@ def test_slack_chart_report_schedule_with_errors( @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_slack_chart_with_csv" ) -@patch("superset.utils.slack.WebClient.files_upload_v2") +@patch("superset.reports.notifications.slack.get_slack_client") @patch("superset.utils.csv.urllib.request.urlopen") @patch("superset.utils.csv.urllib.request.OpenerDirector.open") @patch("superset.utils.csv.get_chart_csv_data") @@ -1194,7 +1207,7 @@ def 
test_slack_chart_report_schedule_with_csv( csv_mock, mock_open, mock_urlopen, - file_upload_mock, + slack_client_mock_class, create_report_slack_chart_with_csv, ): """ @@ -1207,16 +1220,30 @@ def test_slack_chart_report_schedule_with_csv( mock_urlopen.return_value.getcode.return_value = 200 response.read.return_value = CSV_FILE + notification_targets = get_target_from_report_schedule( + create_report_slack_chart_with_csv + ) + + channel_name = notification_targets[0] + channel_id = "channel_id" + slack_client_mock_class.return_value = Mock() + slack_client_mock_class.return_value.conversations_list.return_value = { + "channels": [{"id": channel_id, "name": channel_name}] + } + with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart_with_csv.id, datetime.utcnow() + TEST_ID, create_report_slack_chart_with_csv.id, datetime.now(timezone.utc) ).run() - notification_targets = get_target_from_report_schedule( - create_report_slack_chart_with_csv + assert ( + slack_client_mock_class.return_value.files_upload_v2.call_args[1]["channel"] + == channel_id + ) + assert ( + slack_client_mock_class.return_value.files_upload_v2.call_args[1]["file"] + == CSV_FILE ) - assert file_upload_mock.call_args[1]["channels"] == notification_targets[0] - assert file_upload_mock.call_args[1]["file"] == CSV_FILE # Assert logs are correct assert_log(ReportState.SUCCESS) @@ -1225,15 +1252,15 @@ def test_slack_chart_report_schedule_with_csv( @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_slack_chart_with_text" ) -@patch("superset.utils.slack.WebClient.chat_postMessage") @patch("superset.utils.csv.urllib.request.urlopen") @patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.reports.notifications.slack.get_slack_client") @patch("superset.utils.csv.get_chart_dataframe") def test_slack_chart_report_schedule_with_text( dataframe_mock, + slack_client_mock_class, mock_open, mock_urlopen, - post_message_mock, create_report_slack_chart_with_text, ): """ @@ -1255,24 +1282,43 @@ def test_slack_chart_report_schedule_with_text( }, "colnames": [("t1",), ("t2",), ("t3__sum",)], "indexnames": [(0,), (1,)], + "coltypes": [1, 1, 0], }, ], } ).encode("utf-8") + notification_targets = get_target_from_report_schedule( + create_report_slack_chart_with_text + ) + + channel_name = notification_targets[0] + channel_id = "channel_id" + + slack_client_mock_class.return_value.conversations_list.return_value = { + "channels": [{"id": channel_id, "name": channel_name}] + } + with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart_with_text.id, datetime.utcnow() + TEST_ID, create_report_slack_chart_with_text.id, datetime.now(timezone.utc) ).run() table_markdown = """| | t1 | t2 | t3__sum | |---:|:-----|:-----|:----------| | 0 | c11 | c12 | c13 | | 1 | c21 | c22 | c23 |""" - assert table_markdown in post_message_mock.call_args[1]["text"] + assert ( + table_markdown + in slack_client_mock_class.return_value.chat_postMessage.call_args[1][ + "text" + ] + ) assert ( f"" - in post_message_mock.call_args[1]["text"] + in slack_client_mock_class.return_value.chat_postMessage.call_args[1][ + "text" + ] ) # Assert logs are correct @@ -1286,7 +1332,9 @@ def test_report_schedule_not_found(create_report_slack_chart): """ max_id = db.session.query(func.max(ReportSchedule.id)).scalar() with pytest.raises(ReportScheduleNotFoundError): - AsyncExecuteReportScheduleCommand(TEST_ID, max_id + 1, 
datetime.utcnow()).run() + AsyncExecuteReportScheduleCommand( + TEST_ID, max_id + 1, datetime.now(timezone.utc) + ).run() @pytest.mark.usefixtures("create_report_slack_chart_working") @@ -1298,7 +1346,9 @@ def test_report_schedule_working(create_report_slack_chart_working): with freeze_time("2020-01-01T00:00:00Z"): with pytest.raises(ReportSchedulePreviousWorkingError): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart_working.id, datetime.utcnow() + TEST_ID, + create_report_slack_chart_working.id, + datetime.now(timezone.utc), ).run() assert_log( @@ -1319,7 +1369,9 @@ def test_report_schedule_working_timeout(create_report_slack_chart_working): with freeze_time(current_time): with pytest.raises(ReportScheduleWorkingTimeoutError): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart_working.id, datetime.utcnow() + TEST_ID, + create_report_slack_chart_working.id, + datetime.now(timezone.utc), ).run() # Only needed for MySQL, understand why @@ -1345,7 +1397,7 @@ def test_report_schedule_success_grace(create_alert_slack_chart_success): with freeze_time(current_time): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_slack_chart_success.id, datetime.utcnow() + TEST_ID, create_alert_slack_chart_success.id, datetime.now(timezone.utc) ).run() db.session.commit() @@ -1353,10 +1405,14 @@ def test_report_schedule_success_grace(create_alert_slack_chart_success): @pytest.mark.usefixtures("create_alert_slack_chart_grace") -@patch("superset.utils.slack.WebClient.files_upload_v2") +@patch("superset.utils.slack.WebClient.files_upload") @patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +@patch("superset.reports.notifications.slack.get_slack_client") def test_report_schedule_success_grace_end( - screenshot_mock, file_upload_mock, create_alert_slack_chart_grace + slack_client_mock_class, + screenshot_mock, + file_upload_mock, + create_alert_slack_chart_grace, ): """ ExecuteReport Command: Test report schedule on grace to noop @@ -1369,9 +1425,20 @@ def test_report_schedule_success_grace_end( seconds=create_alert_slack_chart_grace.grace_period + 1 ) + notification_targets = get_target_from_report_schedule( + create_alert_slack_chart_grace + ) + + channel_name = notification_targets[0] + channel_id = "channel_id" + + slack_client_mock_class.return_value.conversations_list.return_value = { + "channels": [{"id": channel_id, "name": channel_name}] + } + with freeze_time(current_time): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_slack_chart_grace.id, datetime.utcnow() + TEST_ID, create_alert_slack_chart_grace.id, datetime.now(timezone.utc) ).run() db.session.commit() @@ -1400,7 +1467,7 @@ def test_alert_limit_is_applied( return_value=None, ): # noqa: F841 AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() assert "LIMIT 2" in execute_mock.call_args[0][1] @@ -1424,7 +1491,7 @@ def test_email_dashboard_report_fails( with pytest.raises(ReportScheduleSystemErrorsException): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_dashboard.id, datetime.utcnow() + TEST_ID, create_report_email_dashboard.id, datetime.now(timezone.utc) ).run() assert_log(ReportState.ERROR, error_message="Could not connect to SMTP XPTO") @@ -1450,7 +1517,7 @@ def test_email_dashboard_report_fails_uncaught_exception( with pytest.raises(Exception): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_dashboard.id, 
datetime.utcnow() + TEST_ID, create_report_email_dashboard.id, datetime.now(timezone.utc) ).run() assert_log(ReportState.ERROR, error_message="Uncaught exception") @@ -1478,7 +1545,7 @@ def test_slack_chart_alert( with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule(create_alert_email_chart) @@ -1507,7 +1574,7 @@ def test_slack_chart_alert_no_attachment(email_mock, create_alert_email_chart): with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule(create_alert_email_chart) @@ -1533,16 +1600,24 @@ def test_slack_token_callable_chart_report( """ ExecuteReport Command: Test chart slack alert (slack token callable) """ + notification_targets = get_target_from_report_schedule(create_report_slack_chart) + + channel_name = notification_targets[0] + channel_id = "channel_id" slack_client_mock_class.return_value = Mock() + slack_client_mock_class.return_value.conversations_list.return_value = { + "channels": [{"id": channel_id, "name": channel_name}] + } + app.config["SLACK_API_TOKEN"] = Mock(return_value="cool_code") # setup screenshot mock screenshot_mock.return_value = SCREENSHOT_FILE with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_slack_chart.id, datetime.utcnow() + TEST_ID, create_report_slack_chart.id, datetime.now(timezone.utc) ).run() - app.config["SLACK_API_TOKEN"].assert_called_once() + app.config["SLACK_API_TOKEN"].assert_called() assert slack_client_mock_class.called_with(token="cool_code", proxy="") assert_log(ReportState.SUCCESS) @@ -1554,7 +1629,7 @@ def test_email_chart_no_alert(create_no_alert_email_chart): """ with freeze_time("2020-01-01T00:00:00Z"): AsyncExecuteReportScheduleCommand( - TEST_ID, create_no_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_no_alert_email_chart.id, datetime.now(timezone.utc) ).run() assert_log(ReportState.NOOP) @@ -1569,7 +1644,7 @@ def test_email_mul_alert(create_mul_alert_email_chart): (AlertQueryMultipleRowsError, AlertQueryMultipleColumnsError) ): AsyncExecuteReportScheduleCommand( - TEST_ID, create_mul_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_mul_alert_email_chart.id, datetime.now(timezone.utc) ).run() @@ -1591,7 +1666,7 @@ def test_soft_timeout_alert(email_mock, create_alert_email_chart): execute_mock.side_effect = SoftTimeLimitExceeded() with pytest.raises(AlertQueryTimeout): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() get_target_from_report_schedule(create_alert_email_chart) # noqa: F841 @@ -1621,7 +1696,7 @@ def test_soft_timeout_screenshot(screenshot_mock, email_mock, create_alert_email screenshot_mock.side_effect = SoftTimeLimitExceeded() with pytest.raises(ReportScheduleScreenshotTimeout): AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() # Assert the email smtp address, asserts a notification was sent with the error @@ -1658,12 +1733,10 @@ def test_soft_timeout_csv( with 
pytest.raises(ReportScheduleCsvTimeout): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + TEST_ID, create_report_email_chart_with_csv.id, datetime.now(timezone.utc) ).run() - get_target_from_report_schedule( # noqa: F841 - create_report_email_chart_with_csv - ) + get_target_from_report_schedule(create_report_email_chart_with_csv) # noqa: F841 # Assert the email smtp address, asserts a notification was sent with the error assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL @@ -1698,12 +1771,10 @@ def test_generate_no_csv( with pytest.raises(ReportScheduleCsvFailedError): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + TEST_ID, create_report_email_chart_with_csv.id, datetime.now(timezone.utc) ).run() - get_target_from_report_schedule( # noqa: F841 - create_report_email_chart_with_csv - ) + get_target_from_report_schedule(create_report_email_chart_with_csv) # noqa: F841 # Assert the email smtp address, asserts a notification was sent with the error assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL @@ -1729,7 +1800,7 @@ def test_fail_screenshot(screenshot_mock, email_mock, create_report_email_chart) screenshot_mock.side_effect = Exception("Unexpected error") with pytest.raises(ReportScheduleScreenshotFailedError): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart.id, datetime.utcnow() + TEST_ID, create_report_email_chart.id, datetime.now(timezone.utc) ).run() get_target_from_report_schedule(create_report_email_chart) # noqa: F841 @@ -1762,7 +1833,7 @@ def test_fail_csv( with pytest.raises(ReportScheduleCsvFailedError): AsyncExecuteReportScheduleCommand( - TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + TEST_ID, create_report_email_chart_with_csv.id, datetime.now(timezone.utc) ).run() get_target_from_report_schedule(create_report_email_chart_with_csv) @@ -1788,7 +1859,7 @@ def test_email_disable_screenshot(email_mock, create_alert_email_chart): """ AsyncExecuteReportScheduleCommand( - TEST_ID, create_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_alert_email_chart.id, datetime.now(timezone.utc) ).run() notification_targets = get_target_from_report_schedule(create_alert_email_chart) @@ -1808,7 +1879,9 @@ def test_invalid_sql_alert(email_mock, create_invalid_sql_alert_email_chart): with freeze_time("2020-01-01T00:00:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() # Assert the email smtp address, asserts a notification was sent with the error @@ -1824,7 +1897,9 @@ def test_grace_period_error(email_mock, create_invalid_sql_alert_email_chart): with freeze_time("2020-01-01T00:00:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() # Only needed for MySQL, understand why @@ -1839,7 +1914,9 @@ def test_grace_period_error(email_mock, create_invalid_sql_alert_email_chart): with freeze_time("2020-01-01T00:30:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + 
create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() db.session.commit() assert ( @@ -1850,7 +1927,9 @@ def test_grace_period_error(email_mock, create_invalid_sql_alert_email_chart): with freeze_time("2020-01-01T01:30:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() db.session.commit() assert ( @@ -1871,7 +1950,9 @@ def test_grace_period_error_flap( with freeze_time("2020-01-01T00:00:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() db.session.commit() # Assert we have 1 notification sent on the log @@ -1882,7 +1963,9 @@ def test_grace_period_error_flap( with freeze_time("2020-01-01T00:30:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() db.session.commit() assert ( @@ -1897,11 +1980,11 @@ def test_grace_period_error_flap( with freeze_time("2020-01-01T00:31:00Z"): # One success AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.now(timezone.utc) ).run() # Grace period ends AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.now(timezone.utc) ).run() db.session.commit() @@ -1915,7 +1998,9 @@ def test_grace_period_error_flap( with freeze_time("2020-01-01T00:32:00Z"): with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): AsyncExecuteReportScheduleCommand( - TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + TEST_ID, + create_invalid_sql_alert_email_chart.id, + datetime.now(timezone.utc), ).run() db.session.commit() assert (
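
Note on the API change above: the patch replaces the legacy comma-separated "channels" string passed to files_upload with an explicit lookup of channel IDs via conversations_list, followed by one files_upload_v2 call per resolved channel, failing fast with "No valid channel found" when nothing matches. The following is a minimal sketch of that flow using slack_sdk directly; the function names, token handling, and sample payload are illustrative assumptions, not Superset's actual helpers.

# A minimal sketch of the flow this patch adopts, not Superset's code.
# Assumptions: a bot token in the SLACK_API_TOKEN environment variable,
# and helper names invented here purely for illustration.
import os

from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError


def resolve_channel_ids(client: WebClient, names: list[str]) -> list[str]:
    # conversations_list returns channel objects with "id" and "name";
    # files_upload_v2 wants the id, so map configured names to ids.
    # (Large workspaces would also need cursor pagination, omitted here.)
    wanted = set(names)
    response = client.conversations_list(types="public_channel,private_channel")
    return [c["id"] for c in response["channels"] if c["name"] in wanted]


def send_report(client: WebClient, names: list[str], body: str, attachment: bytes) -> None:
    channel_ids = resolve_channel_ids(client, names)
    if not channel_ids:
        raise ValueError("No valid channel found")
    for channel_id in channel_ids:
        # Unlike the legacy files_upload, files_upload_v2 takes a single
        # channel id rather than a comma-separated "channels" string.
        client.files_upload_v2(
            channel=channel_id,
            file=attachment,
            initial_comment=body,
            title="Weekly report",
        )


if __name__ == "__main__":
    client = WebClient(token=os.environ["SLACK_API_TOKEN"])
    try:
        send_report(client, ["reports"], "Here is the weekly report", b"a,b\n1,2\n")
    except SlackApiError as ex:
        print(f"Slack API error: {ex.response['error']}")

Resolving the IDs up front is also what lets the new send() path raise NotificationParamException before any rendering or upload work happens, which is the behavior the updated tests assert by mocking conversations_list on the client returned from get_slack_client.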