This is an automated email from the ASF dual-hosted git repository.
taragolis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new be72d6a258 Resolve `PT012` in `common.sql`, `datadog`, `dbt`, and `jenkins` providers tests (#38429)
be72d6a258 is described below
commit be72d6a258c54779d58c1c6a34b1d5ba3f860323
Author: Andrey Anshin <[email protected]>
AuthorDate: Sun Mar 24 14:52:48 2024 +0400
Resolve `PT012` in `common.sql`, `datadog`, `dbt`, and `jenkins` providers tests (#38429)
---
pyproject.toml | 4 ---
tests/providers/common/sql/operators/test_sql.py | 44 +++++++++++++-----------
tests/providers/datadog/sensors/test_datadog.py | 22 ++++++------
tests/providers/dbt/cloud/operators/test_dbt.py | 13 +++----
tests/providers/jenkins/sensors/test_jenkins.py | 2 +-
5 files changed, 41 insertions(+), 44 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index ed40e6df1f..43f530e18c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1516,13 +1516,10 @@ combine-as-imports = true
"tests/providers/cncf/kubernetes/operators/test_pod.py" = ["PT012"]
"tests/providers/cncf/kubernetes/utils/test_k8s_resource_iterator.py" = ["PT012"]
"tests/providers/cncf/kubernetes/utils/test_pod_manager.py" = ["PT012"]
-"tests/providers/common/sql/operators/test_sql.py" = ["PT012"]
"tests/providers/databricks/hooks/test_databricks.py" = ["PT012"]
"tests/providers/databricks/operators/test_databricks.py" = ["PT012"]
"tests/providers/databricks/operators/test_databricks_repos.py" = ["PT012"]
"tests/providers/databricks/sensors/test_databricks_partition.py" = ["PT012"]
-"tests/providers/datadog/sensors/test_datadog.py" = ["PT012"]
-"tests/providers/dbt/cloud/operators/test_dbt.py" = ["PT012"]
"tests/providers/google/cloud/hooks/test_bigquery.py" = ["PT012"]
"tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py" = ["PT012"]
"tests/providers/google/cloud/hooks/test_dataflow.py" = ["PT012"]
@@ -1546,7 +1543,6 @@ combine-as-imports = true
"tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py" = ["PT012"]
"tests/providers/google/cloud/utils/test_credentials_provider.py" = ["PT012"]
"tests/providers/google/common/hooks/test_base_google.py" = ["PT012"]
-"tests/providers/jenkins/sensors/test_jenkins.py" = ["PT012"]
"tests/providers/microsoft/azure/hooks/test_data_factory.py" = ["PT012"]
"tests/providers/microsoft/azure/hooks/test_wasb.py" = ["PT012"]
"tests/providers/microsoft/psrp/hooks/test_psrp.py" = ["PT012"]
diff --git a/tests/providers/common/sql/operators/test_sql.py b/tests/providers/common/sql/operators/test_sql.py
index 97ede82079..1ded44f2ec 100644
--- a/tests/providers/common/sql/operators/test_sql.py
+++ b/tests/providers/common/sql/operators/test_sql.py
@@ -186,30 +186,34 @@ class TestColumnCheckOperator:
]
def test_max_less_than_fails_check(self, monkeypatch):
- with pytest.raises(AirflowException):
- records = [
- ("X", "null_check", 1),
- ("X", "distinct_check", 10),
- ("X", "unique_check", 10),
- ("X", "min", 1),
- ("X", "max", 21),
- ]
- operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
+ records = [
+ ("X", "null_check", 1),
+ ("X", "distinct_check", 10),
+ ("X", "unique_check", 10),
+ ("X", "min", 1),
+ ("X", "max", 21),
+ ]
+ operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
+ with pytest.raises(AirflowException, match="Test failed") as err_ctx:
operator.execute(context=MagicMock())
- assert operator.column_mapping["X"]["max"]["success"] is False
+ assert "Check: max" in str(err_ctx.value)
+ assert "{'less_than': 20, 'greater_than': 10, 'result': 21, 'success': False}" in str(err_ctx.value)
+ assert operator.column_mapping["X"]["max"]["success"] is False
def test_max_greater_than_fails_check(self, monkeypatch):
- with pytest.raises(AirflowException):
- records = [
- ("X", "null_check", 1),
- ("X", "distinct_check", 10),
- ("X", "unique_check", 10),
- ("X", "min", 1),
- ("X", "max", 9),
- ]
- operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
+ records = [
+ ("X", "null_check", 1),
+ ("X", "distinct_check", 10),
+ ("X", "unique_check", 10),
+ ("X", "min", 1),
+ ("X", "max", 9),
+ ]
+ operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
+ with pytest.raises(AirflowException, match="Test failed") as err_ctx:
operator.execute(context=MagicMock())
- assert operator.column_mapping["X"]["max"]["success"] is False
+ assert "Check: max" in str(err_ctx.value)
+ assert "{'less_than': 20, 'greater_than': 10, 'result': 9, 'success': False}" in str(err_ctx.value)
+ assert operator.column_mapping["X"]["max"]["success"] is False
def test_pass_all_checks_inexact_check(self, monkeypatch):
records = [
diff --git a/tests/providers/datadog/sensors/test_datadog.py b/tests/providers/datadog/sensors/test_datadog.py
index c608c9790e..86633ecd5c 100644
--- a/tests/providers/datadog/sensors/test_datadog.py
+++ b/tests/providers/datadog/sensors/test_datadog.py
@@ -126,16 +126,16 @@ class TestDatadogSensor:
api1.return_value = zero_events
api2.return_value = {"status": "error"}
+ sensor = DatadogSensor(
+ task_id="test_datadog",
+ datadog_conn_id="datadog_default",
+ from_seconds_ago=0,
+ up_to_seconds_from_now=0,
+ priority=None,
+ sources=None,
+ tags=None,
+ response_check=None,
+ soft_fail=soft_fail,
+ )
with pytest.raises(expected_exception):
- sensor = DatadogSensor(
- task_id="test_datadog",
- datadog_conn_id="datadog_default",
- from_seconds_ago=0,
- up_to_seconds_from_now=0,
- priority=None,
- sources=None,
- tags=None,
- response_check=None,
- soft_fail=soft_fail,
- )
sensor.poke({})
diff --git a/tests/providers/dbt/cloud/operators/test_dbt.py b/tests/providers/dbt/cloud/operators/test_dbt.py
index 90465602dc..7df954c105 100644
--- a/tests/providers/dbt/cloud/operators/test_dbt.py
+++ b/tests/providers/dbt/cloud/operators/test_dbt.py
@@ -235,19 +235,16 @@ class TestDbtCloudRunJobOperator:
assert mock_run_job.return_value.data["id"] == RUN_ID
elif expected_output == "exception":
# The operator should fail if the job run fails or is cancelled.
- with pytest.raises(DbtCloudJobRunException) as err:
+ error_message = "has failed or has been cancelled\.$"
+ with pytest.raises(DbtCloudJobRunException, match=error_message):
operator.execute(context=self.mock_context)
-
- assert err.value.endswith("has failed or has been cancelled.")
else:
# Demonstrating the operator timing out after surpassing the configured timeout value.
- with pytest.raises(DbtCloudJobRunException) as err:
+ timeout = self.config["timeout"]
+ error_message = f"has not reached a terminal status after {timeout} seconds\.$"
+ with pytest.raises(DbtCloudJobRunException, match=error_message):
operator.execute(context=self.mock_context)
- assert err.value.endswith(
- f"has not reached a terminal status after {self.config['timeout']} seconds."
- )
-
mock_run_job.assert_called_once_with(
account_id=account_id,
job_id=JOB_ID,
diff --git a/tests/providers/jenkins/sensors/test_jenkins.py b/tests/providers/jenkins/sensors/test_jenkins.py
index c63275105a..df6a7360df 100644
--- a/tests/providers/jenkins/sensors/test_jenkins.py
+++ b/tests/providers/jenkins/sensors/test_jenkins.py
@@ -113,7 +113,7 @@ class TestJenkinsBuildSensor:
if result not in sensor.target_states:
with pytest.raises(expected_exception):
sensor.poke(None)
- assert jenkins_mock.get_build_info.call_count == 2
+ assert jenkins_mock.get_build_info.call_count == 2
else:
output = sensor.poke(None)
assert output == (not build_state)