diff --git a/airflow/datadog_checks/airflow/airflow.py b/airflow/datadog_checks/airflow/airflow.py index 9e1f082520d87..e20617253fee7 100644 --- a/airflow/datadog_checks/airflow/airflow.py +++ b/airflow/datadog_checks/airflow/airflow.py @@ -7,6 +7,7 @@ import requests from datadog_checks.base import AgentCheck, ConfigurationError +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from datadog_checks.base.utils.time import get_timestamp AIRFLOW_STATUS_OK = "OK" @@ -141,6 +142,6 @@ def _get_json(self, url): self.warning( "Couldn't connect to URL: %s with exception: %s. Please verify the address is reachable", url, e ) - except requests.exceptions.Timeout as e: + except (requests.exceptions.Timeout, HTTPTimeoutError) as e: self.warning("Connection timeout when connecting to %s: %s", url, e) return None diff --git a/airflow/tests/test_unit.py b/airflow/tests/test_unit.py index cbdef5dc22efd..d8d70878c741e 100644 --- a/airflow/tests/test_unit.py +++ b/airflow/tests/test_unit.py @@ -26,18 +26,16 @@ def test_service_checks_cannot_connect(aggregator): 'json_resp, expected_healthy_status, expected_healthy_value', [({'status': 'OK'}, AgentCheck.OK, 1), ({'status': 'KO'}, AgentCheck.CRITICAL, 0), ({}, AgentCheck.CRITICAL, 0)], ) -def test_service_checks_healthy_exp(aggregator, json_resp, expected_healthy_status, expected_healthy_value): +def test_service_checks_healthy_exp(aggregator, mock_http, json_resp, expected_healthy_status, expected_healthy_value): instance = common.FULL_CONFIG['instances'][0] check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) - with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value=None): - mock_session = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=mock_session): - mock_resp = mock.MagicMock(status_code=200) - mock_resp.json.side_effect = [json_resp] - mock_session.get.return_value = mock_resp + mock_resp = 
mock.MagicMock(status_code=200) + mock_resp.json.side_effect = [json_resp] + mock_http.get.return_value = mock_resp - check.check(None) + with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value=None): + check.check(None) tags = ['key:my-tag', 'url:http://localhost:8080'] @@ -54,22 +52,20 @@ def test_service_checks_healthy_exp(aggregator, json_resp, expected_healthy_stat ], ) def test_service_checks_healthy_stable( - aggregator, metadb_status, scheduler_status, expected_healthy_status, expected_healthy_value + aggregator, mock_http, metadb_status, scheduler_status, expected_healthy_status, expected_healthy_value ): # Stable is only defined in the context of Airflow 2 instance = common.FULL_CONFIG['instances'][0] check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) - with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - mock_session = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=mock_session): - mock_resp = mock.MagicMock(status_code=200) - mock_resp.json.side_effect = [ - {'metadatabase': {'status': metadb_status}, 'scheduler': {'status': scheduler_status}}, - {'status': 'OK'}, - ] - mock_session.get.return_value = mock_resp + mock_resp = mock.MagicMock(status_code=200) + mock_resp.json.side_effect = [ + {'metadatabase': {'status': metadb_status}, 'scheduler': {'status': scheduler_status}}, + {'status': 'OK'}, + ] + mock_http.get.return_value = mock_resp - check.check(None) + with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): + check.check(None) tags = ['key:my-tag', 'url:http://localhost:8080'] @@ -77,42 +73,39 @@ def test_service_checks_healthy_stable( aggregator.assert_metric('airflow.healthy', expected_healthy_value, tags=tags, count=1) -def test_dag_total_tasks(aggregator, task_instance): +def test_dag_total_tasks(aggregator, mock_http, task_instance): instance = 
common.FULL_CONFIG['instances'][0] check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) - with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): - mock_resp = mock.MagicMock(status_code=200) - mock_resp.json.side_effect = [ - {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, - task_instance, - ] - req.get.return_value = mock_resp + mock_resp = mock.MagicMock(status_code=200) + mock_resp.json.side_effect = [ + {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, + task_instance, + ] + mock_http.get.return_value = mock_resp - check.check(None) + with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): + check.check(None) aggregator.assert_metric('airflow.dag.task.total_running', value=1, count=1) -def test_dag_task_ongoing_duration(aggregator, task_instance): +def test_dag_task_ongoing_duration(aggregator, mock_http, task_instance): instance = common.FULL_CONFIG['instances'][0] check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) + mock_resp = mock.MagicMock(status_code=200) + mock_resp.json.side_effect = [ + {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, + ] + mock_http.get.return_value = mock_resp + with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): - mock_resp = mock.MagicMock(status_code=200) - mock_resp.json.side_effect = [ - {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, - ] - req.get.return_value = mock_resp - with mock.patch( - 'datadog_checks.airflow.airflow.AirflowCheck._get_all_task_instances', - return_value=task_instance.get('task_instances'), - ): - check.check(None) + 
with mock.patch( + 'datadog_checks.airflow.airflow.AirflowCheck._get_all_task_instances', + return_value=task_instance.get('task_instances'), + ): + check.check(None) aggregator.assert_metric( 'airflow.dag.task.ongoing_duration', @@ -141,23 +134,21 @@ def test_dag_task_ongoing_duration(aggregator, task_instance): ), ], ) -def test_config_collect_ongoing_duration(collect_ongoing_duration, should_call_method): +def test_config_collect_ongoing_duration(mock_http, collect_ongoing_duration, should_call_method): instance = {**common.FULL_CONFIG['instances'][0], 'collect_ongoing_duration': collect_ongoing_duration} check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) + mock_resp = mock.MagicMock(status_code=200) + mock_resp.json.side_effect = [ + {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, + ] + mock_http.get.return_value = mock_resp + with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): - mock_resp = mock.MagicMock(status_code=200) - mock_resp.json.side_effect = [ - {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, - ] - req.get.return_value = mock_resp - - with mock.patch( - 'datadog_checks.airflow.airflow.AirflowCheck._get_all_task_instances' - ) as mock_get_all_task_instances: - check.check(None) - - # Assert method calls - mock_get_all_task_instances.assert_has_calls(should_call_method, any_order=False) + with mock.patch( + 'datadog_checks.airflow.airflow.AirflowCheck._get_all_task_instances' + ) as mock_get_all_task_instances: + check.check(None) + + # Assert method calls + mock_get_all_task_instances.assert_has_calls(should_call_method, any_order=False) diff --git a/appgate_sdp/tests/test_unit.py b/appgate_sdp/tests/test_unit.py index d7ba90e710b2b..ded2350b51ac2 100644 --- a/appgate_sdp/tests/test_unit.py +++ 
b/appgate_sdp/tests/test_unit.py @@ -31,7 +31,7 @@ def test_check_appgate_sdp(dd_run_check, aggregator, instance, mock_http_respons def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance, mock_http_response): mock_http_response(status_code=404) check = AppgateSDPCheck('appgate_sdp', {}, [instance]) - with pytest.raises(Exception, match='requests.exceptions.HTTPError'): + with pytest.raises(Exception, match='HTTPStatusError'): dd_run_check(check) aggregator.assert_service_check('appgate_sdp.openmetrics.health', AppgateSDPCheck.CRITICAL) diff --git a/arangodb/tests/test_arangodb.py b/arangodb/tests/test_arangodb.py index 14ec1ce25b8c1..3790a3e599304 100644 --- a/arangodb/tests/test_arangodb.py +++ b/arangodb/tests/test_arangodb.py @@ -9,7 +9,7 @@ from requests import HTTPError from datadog_checks.arangodb import ArangodbCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from .common import METRICS @@ -54,7 +54,7 @@ def test_check(instance, dd_run_check, aggregator, tag_condition, base_tags): def mock_requests_get(session, url, *args, **kwargs): fixture = url.rsplit('/', 1)[-1] - return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', tag_condition, fixture)) + return MockHTTPResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', tag_condition, fixture)) with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(check) diff --git a/argo_workflows/tests/test_unit.py b/argo_workflows/tests/test_unit.py index 28b73a6ab547a..58e90ed1632fb 100644 --- a/argo_workflows/tests/test_unit.py +++ b/argo_workflows/tests/test_unit.py @@ -135,6 +135,6 @@ def test_check_with_fixtures(dd_run_check, aggregator, instance, mock_http_respo def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance, 
mock_http_response): mock_http_response(status_code=404) check = ArgoWorkflowsCheck('argo_workflows', {}, [instance]) - with pytest.raises(Exception, match='requests.exceptions.HTTPError'): + with pytest.raises(Exception, match='HTTPStatusError'): dd_run_check(check) aggregator.assert_service_check('argo_workflows.openmetrics.health', ArgoWorkflowsCheck.CRITICAL) diff --git a/avi_vantage/tests/conftest.py b/avi_vantage/tests/conftest.py index 8de057987afac..46c029fde49d9 100644 --- a/avi_vantage/tests/conftest.py +++ b/avi_vantage/tests/conftest.py @@ -4,14 +4,14 @@ import json import os from typing import Any, AnyStr +from unittest.mock import MagicMock from urllib.parse import urlparse -import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run, get_docker_hostname, get_here from datadog_checks.dev.conditions import CheckDockerLogs -from datadog_checks.dev.http import MockResponse HERE = get_here() @@ -57,7 +57,7 @@ def _get_metrics(metrics_folder=NO_TENANT_METRICS_FOLDER, endpoint=None): @pytest.fixture -def mock_client(): +def mock_client(mock_http): def mock_get(url: AnyStr, *__: Any, **___: Any): parsed = urlparse(url) resource = [part for part in parsed.path.split('/') if len(part) > 0][-1] @@ -69,20 +69,21 @@ def mock_get(url: AnyStr, *__: Any, **___: Any): path['tenant=admin%2Ctenant_a%2Ctenant_b'] = MULTIPLE_TENANTS_METRICS_FOLDER if query_params: - return MockResponse( + return MockHTTPResponse( file_path=os.path.join(HERE, 'compose', 'fixtures', path[query_params], f'{resource}_metrics') ) - return MockResponse( + return MockHTTPResponse( file_path=os.path.join(HERE, 'compose', 'fixtures', NO_TENANT_METRICS_FOLDER, f'{resource}_metrics') ) def mock_post(url: AnyStr, *__: Any, **___: Any): - return mock.MagicMock(status_code=200, content=b'{"results": []}') + return MockHTTPResponse(json_data={"results": []}) - with 
mock.patch('datadog_checks.base.utils.http.RequestsWrapper.get', side_effect=mock_get): - with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.post', new=mock_post): - yield + mock_http.session = MagicMock(cookies={}) + mock_http.get.side_effect = mock_get + mock_http.post.side_effect = mock_post + yield @pytest.fixture(scope='session') diff --git a/celery/tests/test_unit.py b/celery/tests/test_unit.py index b1d50c192b122..c5173a14f0894 100644 --- a/celery/tests/test_unit.py +++ b/celery/tests/test_unit.py @@ -44,7 +44,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = CeleryCheck("celery", {}, [instance]) - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match="HTTPStatusError"): dd_run_check(check) aggregator.assert_all_metrics_covered() diff --git a/cert_manager/tests/test_cert_manager.py b/cert_manager/tests/test_cert_manager.py index ee1097015556e..28a6b9e1e7e6e 100644 --- a/cert_manager/tests/test_cert_manager.py +++ b/cert_manager/tests/test_cert_manager.py @@ -6,8 +6,8 @@ import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.cert_manager import CertManagerCheck -from datadog_checks.dev.http import MockResponse from .common import ACME_METRICS, CERT_METRICS, CONTROLLER_METRICS, MOCK_INSTANCE @@ -32,7 +32,7 @@ def test_check(aggregator, dd_run_check): check = CertManagerCheck('cert_manager', {}, [MOCK_INSTANCE]) def mock_requests_get(url, *args, **kwargs): - return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', 'cert_manager.txt')) + return MockHTTPResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', 'cert_manager.txt')) with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(check) diff --git a/cilium/tests/conftest.py b/cilium/tests/conftest.py index 
caa56b731ba88..e20a4f6f0760b 100644 --- a/cilium/tests/conftest.py +++ b/cilium/tests/conftest.py @@ -3,10 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os -import mock import pytest from datadog_checks.base.utils.common import get_docker_hostname +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.cilium import CiliumCheck from datadog_checks.dev import run_command from datadog_checks.dev.kind import kind_run @@ -198,28 +198,18 @@ def operator_instance_use_openmetrics(): @pytest.fixture() -def mock_agent_data(): +def mock_agent_data(mock_openmetrics_http): f_name = os.path.join(os.path.dirname(__file__), "fixtures", "agent_metrics.txt") with open(f_name, "r") as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={"Content-Type": "text/plain"} - ), - ): - yield + mock_openmetrics_http.get.return_value = MockHTTPResponse(content=text_data, headers={"Content-Type": "text/plain"}) + yield @pytest.fixture() -def mock_operator_data(): +def mock_operator_data(mock_openmetrics_http): f_name = os.path.join(os.path.dirname(__file__), "fixtures", "operator_metrics.txt") with open(f_name, "r") as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={"Content-Type": "text/plain"} - ), - ): - yield + mock_openmetrics_http.get.return_value = MockHTTPResponse(content=text_data, headers={"Content-Type": "text/plain"}) + yield diff --git a/citrix_hypervisor/datadog_checks/citrix_hypervisor/check.py b/citrix_hypervisor/datadog_checks/citrix_hypervisor/check.py index 17f269fb9961c..6704e0994a12c 100644 --- a/citrix_hypervisor/datadog_checks/citrix_hypervisor/check.py +++ b/citrix_hypervisor/datadog_checks/citrix_hypervisor/check.py @@ -3,6 +3,7 @@ # Licensed under a 
3-clause BSD style license (see LICENSE) from __future__ import annotations +from json import JSONDecodeError as StdJSONDecodeError from typing import TYPE_CHECKING, Any, Dict, List # noqa: F401 from xmlrpc.client import ServerProxy @@ -25,7 +26,7 @@ def _safely_process_metrics_response(data: ResponseWrapper) -> dict[str, dict]: # See https://github.com/yaml/pyyaml/issues/443 try: return data.json() - except requests.exceptions.JSONDecodeError: + except (requests.exceptions.JSONDecodeError, StdJSONDecodeError): return yaml.load(data.content, Loader=yaml.SafeLoader) diff --git a/citrix_hypervisor/tests/conftest.py b/citrix_hypervisor/tests/conftest.py index 9670622329d2e..26537a41a4815 100644 --- a/citrix_hypervisor/tests/conftest.py +++ b/citrix_hypervisor/tests/conftest.py @@ -3,11 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os -import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run -from datadog_checks.dev.http import MockResponse from . 
import common @@ -38,17 +37,17 @@ def mock_requests_get(url, *args, **kwargs): print(url_parts) if url_parts[0] == 'wrong': - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) json_file = f"rrd_updates_{url_parts[0]}.json" if url_parts[1] == "rrd_updates" else f"{url_parts[1]}.json" path = os.path.join(common.HERE, 'fixtures', 'standalone', json_file) if not os.path.exists(path): - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) - return MockResponse(file_path=path) + return MockHTTPResponse(file_path=path) @pytest.fixture -def mock_responses(): - with mock.patch('requests.Session.get', side_effect=mock_requests_get): - yield +def mock_responses(mock_http): + mock_http.get.side_effect = mock_requests_get + yield diff --git a/consul/datadog_checks/consul/consul.py b/consul/datadog_checks/consul/consul.py index eb33aa991a396..5ef43b2fcf764 100644 --- a/consul/datadog_checks/consul/consul.py +++ b/consul/datadog_checks/consul/consul.py @@ -16,6 +16,7 @@ from requests import HTTPError from datadog_checks.base import ConfigurationError, OpenMetricsBaseCheck, is_affirmative +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from datadog_checks.base.utils.serialization import json from .common import ( @@ -152,7 +153,7 @@ def consul_request(self, endpoint): resp.raise_for_status() - except requests.exceptions.Timeout as e: + except (requests.exceptions.Timeout, HTTPTimeoutError) as e: msg = 'Consul request to {} timed out'.format(url) self.log.exception(msg) self.service_check( diff --git a/consul/tests/test_unit.py b/consul/tests/test_unit.py index 2b844e3601da8..0d3ecb460631e 100644 --- a/consul/tests/test_unit.py +++ b/consul/tests/test_unit.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import logging -import mock import pytest from datadog_checks.consul import ConsulCheck @@ -155,25 +154,25 @@ def test_get_nodes_with_service_critical(aggregator): 
aggregator.assert_metric('consul.catalog.services_count', value=1, tags=expected_tags) -def test_consul_request(aggregator, instance, mocker): +def test_consul_request(aggregator, instance, mocker, mock_http): consul_check = ConsulCheck(common.CHECK_NAME, {}, [consul_mocks.MOCK_CONFIG]) mocker.patch("datadog_checks.base.utils.serialization.json.loads") - with mock.patch("datadog_checks.consul.consul.requests.Session.get") as mock_requests_get: + + consul_check.consul_request("foo") + url = "{}/{}".format(instance["url"], "foo") + aggregator.assert_service_check("consul.can_connect", ConsulCheck.OK, tags=["url:{}".format(url)], count=1) + + aggregator.reset() + mock_http.get.side_effect = Exception("message") + with pytest.raises(Exception): consul_check.consul_request("foo") - url = "{}/{}".format(instance["url"], "foo") - aggregator.assert_service_check("consul.can_connect", ConsulCheck.OK, tags=["url:{}".format(url)], count=1) - - aggregator.reset() - mock_requests_get.side_effect = Exception("message") - with pytest.raises(Exception): - consul_check.consul_request("foo") - aggregator.assert_service_check( - "consul.can_connect", - ConsulCheck.CRITICAL, - tags=["url:{}".format(url)], - count=1, - message="Consul request to {} failed: message".format(url), - ) + aggregator.assert_service_check( + "consul.can_connect", + ConsulCheck.CRITICAL, + tags=["url:{}".format(url)], + count=1, + message="Consul request to {} failed: message".format(url), + ) def test_service_checks(aggregator): @@ -648,26 +647,13 @@ def test_network_latency_node_name( ), ], ) -def test_config(test_case, extra_config, expected_http_kwargs, mocker): +def test_config(test_case, extra_config, expected_http_kwargs): instance = extra_config check = ConsulCheck(common.CHECK_NAME, {}, instances=[instance]) - mocker.patch("datadog_checks.base.utils.serialization.json.loads") - with mock.patch('datadog_checks.base.utils.http.requests.Session') as session: - mock_session = mock.MagicMock() - 
session.return_value = mock_session - mock_session.get.return_value = mock.MagicMock(status_code=200) - - check.check(None) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - mock_session.get.assert_called_with('/v1/status/leader', **http_wargs) + for key, value in expected_http_kwargs.items(): + if key == 'headers': + for h_key, h_value in value.items(): + assert check.http.get_header(h_key) == h_value + else: + assert check.http.options[key] == value diff --git a/couch/datadog_checks/couch/couch.py b/couch/datadog_checks/couch/couch.py index 14b3ea39da394..22c8ebe1305ab 100644 --- a/couch/datadog_checks/couch/couch.py +++ b/couch/datadog_checks/couch/couch.py @@ -12,6 +12,7 @@ from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException, ConfigurationError from datadog_checks.base.utils.headers import headers +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from datadog_checks.couch import errors @@ -43,7 +44,7 @@ def get(self, url, service_check_tags, run_check=False): AgentCheck.OK, tags=service_check_tags, ) - except requests.exceptions.Timeout as e: + except (requests.exceptions.Timeout, HTTPTimeoutError) as e: self.service_check( self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, diff --git a/couch/tests/test_unit.py b/couch/tests/test_unit.py index 7f2ad8678ae2d..d1bf4b49af3e9 100644 --- a/couch/tests/test_unit.py +++ b/couch/tests/test_unit.py @@ -4,7 +4,6 @@ from copy import deepcopy from unittest.mock import MagicMock -import mock import pytest from datadog_checks.couch import CouchDb @@ -30,24 +29,8 @@ def test_config(test_case, extra_config, expected_http_kwargs): instance.update(extra_config) check = CouchDb(common.CHECK_NAME, {}, instances=[instance]) - r = mock.MagicMock() - with 
mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200, content='{}') - - check.check(instance) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - - r.get.assert_called_with('http://{}:5984/_all_dbs/'.format(common.HOST), **http_wargs) + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value def test_new_version_system_metrics(load_test_data): diff --git a/couchbase/tests/conftest.py b/couchbase/tests/conftest.py index 5d5136982e14c..dfda5ee73fbb5 100644 --- a/couchbase/tests/conftest.py +++ b/couchbase/tests/conftest.py @@ -10,10 +10,10 @@ import pytest import requests +from datadog_checks.base.utils.http_testing import MockHTTPResponse # noqa: F401 from datadog_checks.couchbase import Couchbase from datadog_checks.dev import WaitFor, docker_run from datadog_checks.dev.docker import get_container_ip -from datadog_checks.dev.http import MockResponse from .common import ( BUCKET_NAME, @@ -301,4 +301,4 @@ def mock_http_responses(url, **_params): pytest.fail("url `{url}` not registered".format(url=url)) with open(os.path.join(HERE, 'fixtures', metrics_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/couchbase/tests/test_unit.py b/couchbase/tests/test_unit.py index 25a437c02f842..1836ea77f1e6e 100644 --- a/couchbase/tests/test_unit.py +++ b/couchbase/tests/test_unit.py @@ -124,8 +124,8 @@ def test_extract_index_tags(instance, test_input, expected_tags): assert eval(str(test_output)) == expected_tags -def test_unit(dd_run_check, check, instance, mocker, aggregator): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_unit(dd_run_check, check, instance, mock_http, aggregator): + 
mock_http.get.side_effect = mock_http_responses dd_run_check(check(instance)) @@ -140,8 +140,8 @@ def test_unit(dd_run_check, check, instance, mocker, aggregator): aggregator.assert_metrics_using_metadata(get_metadata_metrics()) -def test_unit_query_metrics(dd_run_check, check, instance_query, mocker, aggregator): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_unit_query_metrics(dd_run_check, check, instance_query, mock_http, aggregator): + mock_http.get.side_effect = mock_http_responses dd_run_check(check(instance_query)) diff --git a/datadog_checks_base/changelog.d/22676.added b/datadog_checks_base/changelog.d/22676.added new file mode 100644 index 0000000000000..ff63715e3c69d --- /dev/null +++ b/datadog_checks_base/changelog.d/22676.added @@ -0,0 +1 @@ +Add library-agnostic HTTP mocks/proto/exceptions and migrate intg tests. diff --git a/datadog_checks_base/datadog_checks/base/checks/base.py b/datadog_checks_base/datadog_checks/base/checks/base.py index 6da57bc3b11a0..13d107b5cea73 100644 --- a/datadog_checks_base/datadog_checks/base/checks/base.py +++ b/datadog_checks_base/datadog_checks/base/checks/base.py @@ -69,7 +69,7 @@ import unicodedata as _module_unicodedata from datadog_checks.base.utils.diagnose import Diagnosis - from datadog_checks.base.utils.http import RequestsWrapper + from datadog_checks.base.utils.http_protocol import HTTPClientProtocol from datadog_checks.base.utils.metadata import MetadataManager inspect: _module_inspect = lazy_loader.load('inspect') @@ -405,7 +405,7 @@ def _get_metric_limit(self, instance=None): return limit @property - def http(self) -> RequestsWrapper: + def http(self) -> HTTPClientProtocol: """ Provides logic to yield consistent network behavior based on user configuration. 
diff --git a/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/base.py b/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/base.py index 1a476138157de..6552a1723c4d1 100644 --- a/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/base.py +++ b/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/base.py @@ -8,6 +8,7 @@ from datadog_checks.base.checks import AgentCheck from datadog_checks.base.errors import ConfigurationError +from datadog_checks.base.utils.http_exceptions import HTTPRequestError, HTTPStatusError from datadog_checks.base.utils.tracing import traced_class from .scraper import OpenMetricsScraper @@ -71,7 +72,8 @@ def check(self, _): with self.adopt_namespace(scraper.namespace): try: scraper.scrape() - except (ConnectionError, RequestException) as e: + # Pairs requests-native + library-agnostic exceptions; simplify to HTTPError after migration. + except (ConnectionError, RequestException, HTTPRequestError, HTTPStatusError) as e: self.log.error("There was an error scraping endpoint %s: %s", endpoint, str(e)) raise type(e)("There was an error scraping endpoint {}: {}".format(endpoint, e)) from None diff --git a/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper/base_scraper.py b/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper/base_scraper.py index fad8fd4c2f309..24719c329efef 100644 --- a/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper/base_scraper.py +++ b/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper/base_scraper.py @@ -23,7 +23,6 @@ from datadog_checks.base.constants import ServiceCheck from datadog_checks.base.errors import ConfigurationError from datadog_checks.base.utils.functions import no_op, return_true -from datadog_checks.base.utils.http import RequestsWrapper class OpenMetricsScraper: @@ -217,18 +216,10 @@ def __init__(self, check, config): self.raw_line_filter = re.compile('|'.join(raw_line_filters)) - self.http 
= RequestsWrapper(config, self.check.init_config, self.check.HTTP_CONFIG_REMAPPER, self.check.log) + self.http = self.check.http self._content_type = '' self._use_latest_spec = is_affirmative(config.get('use_latest_spec', False)) - if self._use_latest_spec: - accept_header = 'application/openmetrics-text;version=1.0.0,application/openmetrics-text;version=0.0.1' - else: - accept_header = 'text/plain' - - # Request the appropriate exposition format - if self.http.options['headers'].get('Accept') == '*/*': - self.http.options['headers']['Accept'] = accept_header self.use_process_start_time = is_affirmative(config.get('use_process_start_time')) @@ -465,6 +456,13 @@ def send_request(self, **kwargs): """ kwargs['stream'] = True + extra_headers = kwargs.get('extra_headers', {}) + if self._use_latest_spec: + accept_header = 'application/openmetrics-text;version=1.0.0,application/openmetrics-text;version=0.0.1' + else: + accept_header = 'text/plain' + extra_headers['Accept'] = accept_header + kwargs['extra_headers'] = extra_headers return self.http.get(self.endpoint, **kwargs) def set_dynamic_tags(self, *tags): diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index ad52272e914a7..5b55effd5828d 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -11,6 +11,7 @@ from collections import ChainMap from contextlib import ExitStack, contextmanager from copy import deepcopy +from typing import TYPE_CHECKING from urllib.parse import quote, urlparse, urlunparse import lazy_loader @@ -28,9 +29,22 @@ from .common import ensure_bytes, ensure_unicode from .headers import get_default_headers, update_headers + +# Re-export HTTP exceptions for single import location +from .http_exceptions import ( # noqa: F401 + HTTPConnectionError, + HTTPError, + HTTPRequestError, + HTTPSSLError, + HTTPStatusError, + HTTPTimeoutError, +) from .time import 
get_timestamp from .tls import SUPPORTED_PROTOCOL_VERSIONS, TlsConfig, create_ssl_context +if TYPE_CHECKING: + from .http_protocol import HTTPClientProtocol, HTTPResponseProtocol # noqa: F401 + # See Performance Optimizations in this package's README.md. requests_kerberos = lazy_loader.load('requests_kerberos') requests_ntlm = lazy_loader.load('requests_ntlm') @@ -434,6 +448,20 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')} self._https_adapters = {} + def get_header(self, name: str, default: str | None = None) -> str | None: + """Look up a request header by name. Lookup is case-insensitive.""" + for key, value in self.options['headers'].items(): + if key.lower() == name.lower(): + return value + return default + + def set_header(self, name: str, value: str) -> None: + for key in self.options['headers']: + if key.lower() == name.lower(): + self.options['headers'][key] = value + return + self.options['headers'][name] = value + def get(self, url, **options): return self._request('get', url, options) diff --git a/datadog_checks_base/datadog_checks/base/utils/http_exceptions.py b/datadog_checks_base/datadog_checks/base/utils/http_exceptions.py new file mode 100644 index 0000000000000..a2863246f9d5b --- /dev/null +++ b/datadog_checks_base/datadog_checks/base/utils/http_exceptions.py @@ -0,0 +1,45 @@ +# (C) Datadog, Inc. 
# (C) Datadog, Inc. 2026-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
"""Library-agnostic HTTP exception hierarchy for the base check HTTP wrapper."""
from typing import Any

__all__ = [
    'HTTPError',
    'HTTPRequestError',
    'HTTPStatusError',
    'HTTPTimeoutError',
    'HTTPConnectionError',
    'HTTPInvalidURLError',
    'HTTPSSLError',
]


class HTTPError(Exception):
    """Base class for HTTP client errors.

    Carries the originating ``request`` and, when one was received, the
    ``response`` object, so callers can inspect them without depending on a
    specific HTTP library.
    """

    def __init__(self, message: str, response: Any = None, request: Any = None):
        super().__init__(message)
        self.response = response
        self.request = request


class HTTPRequestError(HTTPError):
    """An error raised while performing the request itself."""


class HTTPStatusError(HTTPError):
    """The server replied, but with an error status code."""


class HTTPTimeoutError(HTTPRequestError):
    """The request timed out."""


class HTTPConnectionError(HTTPRequestError):
    """A connection to the server could not be established."""


class HTTPInvalidURLError(HTTPRequestError):
    """The request URL was malformed or otherwise unusable."""


class HTTPSSLError(HTTPConnectionError):
    """TLS/SSL negotiation with the server failed."""
class HTTPClientProtocol(Protocol):
    """Structural interface of the HTTP client object exposed to checks.

    Any object providing these attributes and methods (e.g. RequestsWrapper)
    satisfies the protocol; no inheritance is required.
    """

    # Resolved request options (headers, etc.) applied to every call.
    options: dict[str, Any]

    def get(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    def post(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    def head(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    def put(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    def patch(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    def delete(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    # Named `options_method` to avoid clashing with the `options` attribute above.
    def options_method(self, url: str, **options: Any) -> HTTPResponseProtocol: ...

    def get_header(self, name: str, default: str | None = None) -> str | None: ...

    def set_header(self, name: str, value: str) -> None: ...
Keys are stored lowercased.""" + + def __init__(self, data=None): + super().__init__() + if data: + for k, v in data.items(): + self[k] = v + + def __setitem__(self, key, value): + super().__setitem__(key.lower() if isinstance(key, str) else key, value) + + def __getitem__(self, key): + return super().__getitem__(key.lower() if isinstance(key, str) else key) + + def __contains__(self, key): + return super().__contains__(key.lower() if isinstance(key, str) else key) + + def __delitem__(self, key): + super().__delitem__(key.lower() if isinstance(key, str) else key) + + def get(self, key, default=None): + return super().get(key.lower() if isinstance(key, str) else key, default) + + def pop(self, key, *args): + return super().pop(key.lower() if isinstance(key, str) else key, *args) + + def update(self, other=(), **kwargs): + if isinstance(other, Mapping): + other = {(k.lower() if isinstance(k, str) else k): v for k, v in other.items()} + elif other: + other = [(k.lower() if isinstance(k, str) else k, v) for k, v in other] + kwargs = {k.lower(): v for k, v in kwargs.items()} + super().update(other, **kwargs) + + def setdefault(self, key, default=None): + return super().setdefault(key.lower() if isinstance(key, str) else key, default) + + +class MockHTTPResponse: + """Library-agnostic mock HTTP response implementing HTTPResponseProtocol.""" + + # Parameter order differs from MockResponse; not a compatibility concern since all callers use keyword args. 
+ def __init__( + self, + content: str | bytes = '', + status_code: int = 200, + headers: dict[str, str] | None = None, + json_data: Any = None, + file_path: str | None = None, + cookies: dict[str, str] | None = None, + elapsed_seconds: float = 0.1, + normalize_content: bool = True, + url: str = '', + ): + self.url = url + + if json_data is not None: + content = json.dumps(json_data) + # Copy to avoid mutating the caller's dict + headers = dict(headers) if headers is not None else {} + headers.setdefault('Content-Type', 'application/json') + elif file_path is not None: + # Open in binary mode to handle both text and binary files correctly + # This prevents encoding errors and platform-specific newline translation + with open(file_path, 'rb') as f: + content = f.read() + + if normalize_content and ( + (isinstance(content, str) and content.startswith('\n')) + or (isinstance(content, bytes) and content.startswith(b'\n')) + ): + content = dedent(content[1:]) if isinstance(content, str) else content[1:] + + self._content = content.encode('utf-8') if isinstance(content, str) else content + self.status_code = status_code + self.headers = _CaseInsensitiveDict(headers or {}) + self.cookies = cookies or {} + self.encoding: str | None = None + self.elapsed = timedelta(seconds=elapsed_seconds) + self._stream = BytesIO(self._content) + + self.raw = MagicMock() + self.raw.read = self._stream.read + self.raw.connection.sock.getpeercert.side_effect = lambda binary_form=False: b'mock-cert' if binary_form else {} + + @property + def content(self) -> bytes: + return self._content + + @property + def text(self) -> str: + return self._content.decode('utf-8') + + @property + def ok(self) -> bool: + # Transitional: mirrors requests.Response.ok for current production code. + # httpx uses is_success/is_client_error/is_server_error instead. 
+ return self.status_code < 400 + + @property + def reason(self) -> str: + return http_responses.get(self.status_code, '') + + @property + def links(self) -> dict[str, dict[str, str]]: + """Parse Link header into a dict keyed by rel, matching requests.Response.links.""" + header = self.headers.get('link', '').strip().strip("'\"") + result: dict[str, dict[str, str]] = {} + if not header: + return result + # Split on ", <" to avoid breaking URLs that contain commas (matches requests behavior) + for val in re.split(', *<', header): + try: + url, params_str = val.split(';', 1) + except ValueError: + url, params_str = val, '' + link: dict[str, str] = {'url': url.strip("<> '\"")} + for param in params_str.split(';'): + try: + key, value = param.split('=') + except ValueError: + break + link[key.strip(" '\"")] = value.strip(" '\"") + key = link.get('rel') or link.get('url') + if key: + result[key] = link + return result + + def json(self, **kwargs: Any) -> Any: + return json.loads(self.text, **kwargs) + + def raise_for_status(self) -> None: + if self.status_code >= 400: + message = ( + f'{self.status_code} Client Error' if self.status_code < 500 else f'{self.status_code} Server Error' + ) + raise HTTPStatusError(message, response=self) + + def iter_content(self, chunk_size: int | None = None, decode_unicode: bool = False) -> Iterator[bytes | str]: + # chunk_size=None means return the entire content as a single chunk (matches requests behavior) + chunk_size = chunk_size if chunk_size is not None else len(self._content) or 1 + self._stream.seek(0) + while chunk := self._stream.read(chunk_size): + # Decode to string when decode_unicode=True (matches requests behavior) + yield chunk.decode('utf-8') if decode_unicode else chunk + + def iter_lines( + self, chunk_size: int | None = None, decode_unicode: bool = False, delimiter: bytes | str | None = None + ) -> Iterator[bytes | str]: + # Handle string delimiter by converting to bytes + if isinstance(delimiter, str): + delimiter = 
delimiter.encode('utf-8') + delimiter = delimiter or b'\n' + + self._stream.seek(0) + lines = self._stream.read().split(delimiter) + # bytes.split() produces a trailing empty element when content ends with the + # delimiter (e.g. b'a\nb\n'.split(b'\n') == [b'a', b'b', b'']). requests uses + # splitlines() for the default case which does not have this behavior, so we + # strip the trailing empty element to match. + if lines and not lines[-1]: + lines.pop() + for line in lines: + # Decode to string when decode_unicode=True (matches requests behavior) + yield line.decode('utf-8') if decode_unicode else line + + def close(self) -> None: + # No-op: requests.Response.close() releases the network connection, but + # content is already buffered in memory. Matching that behaviour here + # so the same instance can be returned by a mock multiple times. + pass + + def __enter__(self) -> 'MockHTTPResponse': + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool | None: + return None diff --git a/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py b/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py index 1435ee2d2f6e1..363de7a8ce3bc 100644 --- a/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py +++ b/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py @@ -19,9 +19,9 @@ from prometheus_client.samples import Sample from datadog_checks.base import ensure_bytes +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.dev import get_here -from datadog_checks.dev.http import MockResponse text_content_type = 'text/plain; version=0.0.4' FIXTURE_PATH = os.path.abspath(os.path.join(get_here(), '..', '..', '..', '..', 'fixtures', 'prometheus')) @@ -113,7 +113,7 @@ def test_config_instance(mocked_prometheus_check): def 
test_process(text_data, mocked_prometheus_check, mocked_prometheus_scraper_config, ref_gauge): check = mocked_prometheus_check - check.poll = mock.MagicMock(return_value=MockResponse(text_data, headers={'Content-Type': text_content_type})) + check.poll = mock.MagicMock(return_value=MockHTTPResponse(text_data, headers={'Content-Type': text_content_type})) check.process_metric = mock.MagicMock() check.process(mocked_prometheus_scraper_config) check.poll.assert_called_with(mocked_prometheus_scraper_config) @@ -734,7 +734,7 @@ def test_filter_sample_on_gauge(p_check, mocked_prometheus_scraper_config): expected_metric.add_metric(['heapster-v1.4.3'], 1) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check mocked_prometheus_scraper_config['_text_filter_blacklist'] = ["deployment=\"kube-dns\""] metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -767,7 +767,7 @@ def test_parse_one_gauge(p_check, mocked_prometheus_scraper_config): expected_etcd_metric.add_metric([], 1) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -799,7 +799,7 @@ def test_parse_one_counter(p_check, mocked_prometheus_scraper_config): expected_etcd_metric.name = 'go_memstats_mallocs_total' # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -857,7 +857,7 @@ def 
test_parse_one_histograms_with_label(p_check, mocked_prometheus_scraper_conf ) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -991,7 +991,7 @@ def test_parse_one_histogram(p_check, mocked_prometheus_scraper_config): ) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) assert 1 == len(metrics) @@ -1093,7 +1093,7 @@ def test_parse_two_histograms_with_label(p_check, mocked_prometheus_scraper_conf ) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1131,7 +1131,7 @@ def test_decumulate_histogram_buckets(p_check, mocked_prometheus_scraper_config) 'rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="GET"} 755\n' ) - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1220,7 +1220,7 @@ def test_decumulate_histogram_buckets_single_bucket(p_check, mocked_prometheus_s 'rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="GET"} 755\n' ) - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + 
response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1283,7 +1283,7 @@ def test_decumulate_histogram_buckets_multiple_contexts(p_check, mocked_promethe 'rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="POST"} 150\n' ) - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1351,7 +1351,7 @@ def test_decumulate_histogram_buckets_negative_buckets(p_check, mocked_prometheu 'random_histogram_count{url="http://127.0.0.1:8080/api",verb="GET"} 70\n' ) - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1403,7 +1403,7 @@ def test_decumulate_histogram_buckets_no_buckets(p_check, mocked_prometheus_scra 'rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="GET"} 755\n' ) - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1474,7 +1474,7 @@ def test_parse_one_summary(p_check, mocked_prometheus_scraper_config): expected_etcd_metric.add_sample("http_response_size_bytes", {"handler": "prometheus", "quantile": "0.99"}, 25763.0) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check 
= p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1517,7 +1517,7 @@ def test_parse_one_summary_with_no_quantile(p_check, mocked_prometheus_scraper_c expected_etcd_metric.add_metric(["prometheus"], 5.0, 120512.0) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1572,7 +1572,7 @@ def test_parse_two_summaries_with_labels(p_check, mocked_prometheus_scraper_conf ) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) @@ -1613,7 +1613,7 @@ def test_parse_one_summary_with_none_values(p_check, mocked_prometheus_scraper_c ) # Iter on the generator to get all metrics - response = MockResponse(text_data, headers={'Content-Type': text_content_type}) + response = MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) check = p_check metrics = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) assert 1 == len(metrics) @@ -2634,7 +2634,7 @@ def test_filter_metrics( def test_metadata_default(mocked_openmetrics_check_factory, text_data, datadog_agent): instance = dict(OPENMETRICS_CHECK_INSTANCE) check = mocked_openmetrics_check_factory(instance) - check.poll = mock.MagicMock(return_value=MockResponse(text_data, headers={'Content-Type': text_content_type})) + check.poll = mock.MagicMock(return_value=MockHTTPResponse(text_data, headers={'Content-Type': text_content_type})) check.check(instance) datadog_agent.assert_metadata_count(0) @@ -2645,7 +2645,7 @@ def 
test_metadata_transformer(mocked_openmetrics_check_factory, text_data, datad instance['metadata_metric_name'] = 'kubernetes_build_info' instance['metadata_label_map'] = {'version': 'gitVersion'} check = mocked_openmetrics_check_factory(instance) - check.poll = mock.MagicMock(return_value=MockResponse(text_data, headers={'Content-Type': text_content_type})) + check.poll = mock.MagicMock(return_value=MockHTTPResponse(text_data, headers={'Content-Type': text_content_type})) version_metadata = { 'version.major': '1', @@ -2672,7 +2672,10 @@ def test_ssl_verify_not_raise_warning(caplog, mocked_openmetrics_check_factory, check = mocked_openmetrics_check_factory(instance) scraper_config = check.get_scraper_config(instance) - with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): + with ( + caplog.at_level(logging.DEBUG), + mock.patch('requests.Session.get', return_value=MockHTTPResponse('httpbin.org')), + ): resp = check.send_request('https://httpbin.org/get', scraper_config) assert "httpbin.org" in resp.content.decode('utf-8') @@ -2696,7 +2699,10 @@ def test_send_request_with_dynamic_prometheus_url(caplog, mocked_openmetrics_che # `prometheus_url` changed just before calling `send_request` scraper_config['prometheus_url'] = 'https://www.example.com/foo/bar' - with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): + with ( + caplog.at_level(logging.DEBUG), + mock.patch('requests.Session.get', return_value=MockHTTPResponse('httpbin.org')), + ): resp = check.send_request('https://httpbin.org/get', scraper_config) assert "httpbin.org" in resp.content.decode('utf-8') @@ -2736,7 +2742,7 @@ def test_simple_type_overrides(aggregator, mocked_prometheus_check, text_data): config = check.get_scraper_config(instance) config['_dry_run'] = False - check.poll = mock.MagicMock(return_value=MockResponse(text_data, headers={'Content-Type': text_content_type})) + 
check.poll = mock.MagicMock(return_value=MockHTTPResponse(text_data, headers={'Content-Type': text_content_type})) check.process(config) aggregator.assert_metric('prometheus.process.vm.bytes', count=1, metric_type=aggregator.MONOTONIC_COUNT) @@ -2759,7 +2765,7 @@ def test_wildcard_type_overrides(aggregator, mocked_prometheus_check, text_data) config = check.get_scraper_config(instance) config['_dry_run'] = False - check.poll = mock.MagicMock(return_value=MockResponse(text_data, headers={'Content-Type': text_content_type})) + check.poll = mock.MagicMock(return_value=MockHTTPResponse(text_data, headers={'Content-Type': text_content_type})) check.process(config) aggregator.assert_metric('prometheus.process.vm.bytes', count=1, metric_type=aggregator.MONOTONIC_COUNT) @@ -2942,7 +2948,7 @@ def test_use_process_start_time( check = mocked_openmetrics_check_factory(instance) test_data = _make_test_use_process_start_time_data(process_start_time) - check.poll = mock.MagicMock(return_value=MockResponse(test_data, headers={'Content-Type': text_content_type})) + check.poll = mock.MagicMock(return_value=MockHTTPResponse(test_data, headers={'Content-Type': text_content_type})) for _ in range(0, 5): aggregator.reset() @@ -2995,7 +3001,9 @@ def test_refresh_bearer_token(text_data, mocked_openmetrics_check_factory): with patch.object(OpenMetricsBaseCheck, 'KUBERNETES_TOKEN_PATH', os.path.join(TOKENS_PATH, 'default_token')): check = mocked_openmetrics_check_factory(instance) - check.poll = mock.MagicMock(return_value=MockResponse(text_data, headers={'Content-Type': text_content_type})) + check.poll = mock.MagicMock( + return_value=MockHTTPResponse(text_data, headers={'Content-Type': text_content_type}) + ) instance = check.get_scraper_config(instance) assert instance['_bearer_token'] == 'my default token' time.sleep(1.5) diff --git a/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_config.py b/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_config.py index 
0f3f6b7812e68..4defa72e944c1 100644 --- a/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_config.py +++ b/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_config.py @@ -417,12 +417,10 @@ def test_strict_latest_spec(self, dd_run_check): check = get_check({'use_latest_spec': True}) check.configure_scrapers() scraper = check.scrapers['test'] - assert scraper.http.options['headers']['Accept'] == ( - 'application/openmetrics-text;version=1.0.0,application/openmetrics-text;version=0.0.1' - ) + assert scraper._use_latest_spec is True def test_dynamic_spec(self, dd_run_check): check = get_check({'use_latest_spec': False}) check.configure_scrapers() scraper = check.scrapers['test'] - assert scraper.http.options['headers']['Accept'] == 'text/plain' + assert scraper._use_latest_spec is False diff --git a/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py b/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py index 479a85e38bf1a..bc45101f2d8ab 100644 --- a/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py +++ b/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py @@ -1963,11 +1963,14 @@ def test_text_filter_input(): def test_ssl_verify_not_raise_warning(caplog, mocked_prometheus_check, text_data): - from datadog_checks.dev.http import MockResponse + from datadog_checks.base.utils.http_testing import MockHTTPResponse check = mocked_prometheus_check - with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): + with ( + caplog.at_level(logging.DEBUG), + mock.patch('requests.Session.get', return_value=MockHTTPResponse(content='httpbin.org')), + ): resp = check.poll('https://httpbin.org/get') assert 'httpbin.org' in resp.content.decode('utf-8') @@ -1978,12 +1981,15 @@ def test_ssl_verify_not_raise_warning(caplog, mocked_prometheus_check, text_data def test_ssl_verify_not_raise_warning_cert_false(caplog, mocked_prometheus_check, 
text_data): - from datadog_checks.dev.http import MockResponse + from datadog_checks.base.utils.http_testing import MockHTTPResponse check = mocked_prometheus_check check.ssl_ca_cert = False - with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): + with ( + caplog.at_level(logging.DEBUG), + mock.patch('requests.Session.get', return_value=MockHTTPResponse(content='httpbin.org')), + ): resp = check.poll('https://httpbin.org/get') assert 'httpbin.org' in resp.content.decode('utf-8') diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index 3a872371a68f9..710d65a35b4c5 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -10,10 +10,10 @@ from datadog_checks.base import ConfigurationError from datadog_checks.base.utils.http import DEFAULT_EXPIRATION, RequestsWrapper +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.base.utils.time import get_timestamp from datadog_checks.dev import TempDir from datadog_checks.dev.fs import read_file, write_file -from datadog_checks.dev.http import MockResponse from .common import DEFAULT_OPTIONS, FIXTURE_PATH @@ -607,14 +607,14 @@ def login(*args, **kwargs): assert isinstance(decoded['exp'], int) assert abs(decoded['exp'] - (get_timestamp() + exp)) < 10 - return MockResponse(json_data={'token': 'auth-token'}) - return MockResponse(status_code=404) + return MockHTTPResponse(json_data={'token': 'auth-token'}) + return MockHTTPResponse(status_code=404) def auth(*args, **kwargs): if args[0] == 'https://leader.mesos/service/some-service': assert kwargs['headers']['Authorization'] == 'token=auth-token' - return MockResponse(json_data={}) - return MockResponse(status_code=404) + return MockHTTPResponse(json_data={}) + return MockHTTPResponse(status_code=404) with 
def test_get_header_default_for_missing():
    http = RequestsWrapper({}, {})
    assert http.get_header('X-Missing') is None
    assert http.get_header('X-Missing', 'fallback') == 'fallback'


def test_get_header_case_insensitive():
    http = RequestsWrapper({}, {})
    # The default wrapper options always carry an 'Accept: */*' header.
    for variant in ('accept', 'Accept', 'ACCEPT'):
        assert http.get_header(variant) == '*/*'


def test_set_header():
    http = RequestsWrapper({}, {})
    http.set_header('X-Token', 'abc123')
    assert http.get_header('X-Token') == 'abc123'
    http.set_header('Accept', 'application/json')
    assert http.get_header('Accept') == 'application/json'


def test_set_header_case_insensitive():
    http = RequestsWrapper({}, {})
    http.set_header('accept', 'application/json')
    # Overwrites the existing 'Accept' key (preserving original casing)
    assert http.get_header('Accept') == 'application/json'
    assert http.options['headers']['Accept'] == 'application/json'
    # No duplicate key created
    assert len([k for k in http.options['headers'] if k.lower() == 'accept']) == 1
# (C) Datadog, Inc. 2026-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import json

import pytest

from datadog_checks.base import AgentCheck
from datadog_checks.base.utils.http_exceptions import HTTPStatusError
from datadog_checks.base.utils.http_testing import MockHTTPResponse


def test_mock_http_patches_agentcheck(mock_http):
    # The fixture must replace the `http` client on newly created checks.
    check = AgentCheck('test', {}, [{}])
    assert check.http is mock_http


def test_mock_response_json_with_custom_headers():
    resp = MockHTTPResponse(json_data={'key': 'value'}, headers={'X-Custom': 'value'})

    assert resp.headers['content-type'] == 'application/json'
    assert resp.headers['x-custom'] == 'value'


def test_mock_response_json_does_not_mutate_caller_headers():
    caller_headers = {'X-Custom': 'value'}
    MockHTTPResponse(json_data={'key': 'value'}, headers=caller_headers)

    assert list(caller_headers.keys()) == ['X-Custom']


def test_mock_response_file_path(tmp_path):
    fixture = tmp_path / 'fixture.txt'
    fixture.write_bytes(b'file content')

    resp = MockHTTPResponse(file_path=str(fixture))
    assert resp.content == b'file content'


def test_mock_response_raise_for_status():
    resp_client = MockHTTPResponse(content='Not Found', status_code=404)
    with pytest.raises(HTTPStatusError) as exc_info:
        resp_client.raise_for_status()
    assert '404 Client Error' in str(exc_info.value)
    assert exc_info.value.response is resp_client

    resp_server = MockHTTPResponse(content='Server Error', status_code=500)
    with pytest.raises(HTTPStatusError) as exc_info:
        resp_server.raise_for_status()
    assert '500 Server Error' in str(exc_info.value)
    assert exc_info.value.response is resp_server


def test_mock_response_iter_content_chunks():
    resp = MockHTTPResponse(content='hello world')

    assert list(resp.iter_content(chunk_size=5)) == [b'hello', b' worl', b'd']


def test_mock_response_iter_lines_preserves_empty_lines():
    resp = MockHTTPResponse(content='line1\n\nline3\n')

    assert list(resp.iter_lines()) == [b'line1', b'', b'line3']


def test_mock_response_normalize_leading_newline():
    resp = MockHTTPResponse(content='\nActual content')

    assert resp.text == 'Actual content'


def test_mock_response_normalize_leading_newline_with_indent():
    fixture = """
    line one
    line two
    """
    assert MockHTTPResponse(content=fixture).text == "line one\nline two\n"


def test_mock_response_headers_case_insensitive():
    resp = MockHTTPResponse(headers={'Content-Type': 'text/plain', 'X-Custom': 'val'})

    assert resp.headers['Content-Type'] == 'text/plain'
    assert resp.headers['content-type'] == 'text/plain'
    assert resp.headers.get('Content-Type') == 'text/plain'
    assert resp.headers.get('cOnTeNt-tYpE') == 'text/plain'


def test_mock_response_headers_update_and_setdefault():
    resp = MockHTTPResponse(headers={'Content-Type': 'text/plain'})

    resp.headers.update({'X-New': 'new_val'})
    assert resp.headers['x-new'] == 'new_val'

    resp.headers.setdefault('X-Default', 'default_val')
    assert resp.headers['x-default'] == 'default_val'

    resp.headers.setdefault('Content-Type', 'should-not-change')
    assert resp.headers['content-type'] == 'text/plain'

    resp.headers.update([('X-Iter', 'iter_val')])
    assert resp.headers['x-iter'] == 'iter_val'


def test_mock_response_links_standard():
    resp = MockHTTPResponse(headers={'link': '<http://example.com/page2>; rel=next; type="text/plain"'})

    assert 'next' in resp.links
    assert resp.links['next']['url'] == 'http://example.com/page2'
    assert resp.links['next']['type'] == 'text/plain'


def test_mock_response_links_multiple():
    resp = MockHTTPResponse(
        headers={'link': '<http://example.com/page2>; rel=next, <http://example.com/page1>; rel=prev'}
    )

    assert len(resp.links) == 2
    assert resp.links['next']['url'] == 'http://example.com/page2'
    assert resp.links['prev']['url'] == 'http://example.com/page1'


def test_mock_response_links_empty():
    assert MockHTTPResponse().links == {}
    assert MockHTTPResponse(headers={'link': ''}).links == {}


def test_mock_response_links_no_rel_keys_by_url():
    resp = MockHTTPResponse(headers={'link': '<http://example.com/page2>; type="text/plain"'})

    assert 'http://example.com/page2' in resp.links


def test_mock_response_links_url_with_comma():
    resp = MockHTTPResponse(headers={'link': '<http://example.com/path?a=1,2>; rel=next'})

    assert resp.links['next']['url'] == 'http://example.com/path?a=1,2'


def test_mock_response_links_cleared_after_header_pop():
    resp = MockHTTPResponse(headers={'link': '<http://example.com/page2>; rel=next'})

    assert 'next' in resp.links
    resp.headers.pop('link')
    assert resp.links == {}


def test_mock_response_raw_readable():
    resp = MockHTTPResponse(json_data={'key': 'value'})

    assert json.load(resp.raw) == {'key': 'value'}
test_config_kerberos_keytab_file(): with mock.patch( 'requests.Session.get', - side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), + side_effect=lambda *args, **kwargs: MockHTTPResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -132,7 +132,7 @@ def test_config_kerberos_cache(): assert os.environ.get('KRB5CCNAME') is None with mock.patch( - 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockHTTPResponse(os.environ.get('KRB5CCNAME', '')) ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -148,7 +148,8 @@ def test_config_kerberos_cache_restores_rollback(): with EnvVars({'KRB5CCNAME': 'old'}): with mock.patch( - 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) + 'requests.Session.get', + side_effect=lambda *args, **kwargs: MockHTTPResponse(os.environ.get('KRB5CCNAME', '')), ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -167,7 +168,7 @@ def test_config_kerberos_keytab_file_rollback(): with mock.patch( 'requests.Session.get', - side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), + side_effect=lambda *args, **kwargs: MockHTTPResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), ): response = http.get('https://www.google.com') assert response.text == '/test/file' diff --git a/datadog_checks_dev/changelog.d/22676.added b/datadog_checks_dev/changelog.d/22676.added new file mode 100644 index 0000000000000..45a1c1308315f --- /dev/null +++ b/datadog_checks_dev/changelog.d/22676.added @@ -0,0 +1 @@ +Add mock_http, mock_openmetrics_http, and mock_prometheus_http fixtures for library-agnostic HTTP client mocking in integration tests. 
\ No newline at end of file diff --git a/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py b/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py index e9f9c80a513f0..9e474ba6a2ef0 100644 --- a/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py +++ b/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py @@ -33,7 +33,9 @@ __aggregator = None __datadog_agent = None -MockResponse = None +MockHTTPResponse = None + +_DEFAULT_MOCK_METHOD = 'requests.Session.get' # TODO(httpx-migration): update when backend changes @pytest.fixture @@ -286,29 +288,93 @@ def dd_default_hostname(): @pytest.fixture def mock_response(): - # Lazily import `requests` as it may be costly under certain conditions - global MockResponse - if MockResponse is None: - from datadog_checks.dev.http import MockResponse + global MockHTTPResponse + if MockHTTPResponse is None: + from datadog_checks.base.utils.http_testing import MockHTTPResponse - yield MockResponse + yield MockHTTPResponse @pytest.fixture def mock_http_response(mocker, mock_response): yield lambda *args, **kwargs: mocker.patch( - kwargs.pop('method', 'requests.Session.get'), return_value=mock_response(*args, **kwargs) + kwargs.pop('method', _DEFAULT_MOCK_METHOD), return_value=mock_response(*args, **kwargs) + ) + + +@pytest.fixture +def mock_http(mocker): + from unittest.mock import PropertyMock, create_autospec + + from datadog_checks.base.checks.base import AgentCheck + from datadog_checks.base.utils.http_protocol import HTTPClientProtocol + + client = create_autospec(HTTPClientProtocol) + # Protocol annotations are not picked up by create_autospec, so set options explicitly. 
+ client.options = { + 'auth': None, + 'cert': None, + 'headers': {}, + 'proxies': None, + 'timeout': (10.0, 10.0), + 'verify': True, + 'allow_redirects': True, + } + + def _get_header(name, default=None): + for key, value in client.options['headers'].items(): + if key.lower() == name.lower(): + return value + return default + + def _set_header(name, value): + for key in list(client.options['headers']): + if key.lower() == name.lower(): + client.options['headers'][key] = value + return + client.options['headers'][name] = value + + client.get_header.side_effect = _get_header + client.set_header.side_effect = _set_header + client.options_method.side_effect = NotImplementedError('HTTP OPTIONS not yet supported in mock_http') + mocker.patch.object(AgentCheck, 'http', new_callable=PropertyMock, return_value=client) + return client + + +@pytest.fixture +def mock_openmetrics_http(mock_http, mocker): + """OpenMetrics HTTP mock with dual interception: + + - v1 checks (OpenMetricsBaseCheck): patches OpenMetricsScraperMixin.get_http_handler to return mock_http. + - v2 checks (OpenMetricsBaseCheckV2): inherited via mock_http's AgentCheck.http PropertyMock; the + get_http_handler patch is unused on this path because v2 calls self.http.get(...) directly. 
+ """ + mocker.patch( + 'datadog_checks.base.checks.openmetrics.mixins.OpenMetricsScraperMixin.get_http_handler', + return_value=mock_http, + ) + return mock_http + + +@pytest.fixture +def mock_prometheus_http(mock_http, mocker): + """mock_http with PrometheusScraperMixin.get_http_handler patched to return it.""" + mock_http.ignore_tls_warning = False + mocker.patch( + 'datadog_checks.base.checks.prometheus.mixins.PrometheusScraperMixin.get_http_handler', + return_value=mock_http, ) + return mock_http @pytest.fixture def mock_http_response_per_endpoint(mocker, mock_response): @overload def _mock( - responses_by_endpoint: Dict[str, list[MockResponse]], + responses_by_endpoint: Dict[str, list[MockHTTPResponse]], *, mode: Literal["default"], - default_response: MockResponse, + default_response: MockHTTPResponse, method: str = ..., url_arg_index: int = ..., url_kwarg_name: str = ..., @@ -316,7 +382,7 @@ def _mock( ): ... @overload def _mock( - responses_by_endpoint: Dict[str, list[MockResponse]], + responses_by_endpoint: Dict[str, list[MockHTTPResponse]], *, mode: Literal["cycle", "exhaust"], default_response: None = None, @@ -326,10 +392,10 @@ def _mock( strict: bool = ..., ): ... 
def _mock( - responses_by_endpoint: Dict[str, list[MockResponse]], + responses_by_endpoint: Dict[str, list[MockHTTPResponse]], mode: Literal['cycle', 'exhaust', 'default'] = 'cycle', - default_response: MockResponse | None = None, - method: str = 'requests.Session.get', + default_response: MockHTTPResponse | None = None, + method: str = _DEFAULT_MOCK_METHOD, url_arg_index: int = 1, url_kwarg_name: str = "url", strict: bool = True, @@ -364,7 +430,7 @@ def side_effect(*args, **kwargs): if strict: raise ValueError(f"Endpoint {url} not found in mocked responses") else: - return MockResponse(status_code=404) + return mock_response(status_code=404) else: try: return next(queues[url]) diff --git a/datadog_cluster_agent/tests/conftest.py b/datadog_cluster_agent/tests/conftest.py index 4ccf2c30a4710..b6d28217c9d9a 100644 --- a/datadog_cluster_agent/tests/conftest.py +++ b/datadog_cluster_agent/tests/conftest.py @@ -4,9 +4,10 @@ import os from copy import deepcopy -import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse + INSTANCE = {'prometheus_url': 'http://localhost:5000/metrics'} @@ -21,14 +22,9 @@ def instance(): @pytest.fixture() -def mock_metrics_endpoint(): +def mock_metrics_endpoint(mock_openmetrics_http): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'metrics.txt') with open(f_name, 'r') as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} - ), - ): - yield + mock_openmetrics_http.get.return_value = MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) + yield diff --git a/dcgm/tests/conftest.py b/dcgm/tests/conftest.py index ab5c6d305432e..628d5c03875b9 100644 --- a/dcgm/tests/conftest.py +++ b/dcgm/tests/conftest.py @@ -4,10 +4,10 @@ import copy import os -from unittest import mock import pytest +from 
datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dcgm import DcgmCheck from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints @@ -40,28 +40,18 @@ def check(instance): @pytest.fixture() -def mock_metrics(): +def mock_metrics(mock_http): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'metrics.txt') with open(f_name, 'r') as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} - ), - ): - yield + mock_http.get.return_value = MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) + yield @pytest.fixture() -def mock_label_remap(): +def mock_label_remap(mock_http): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'label_remap.txt') with open(f_name, 'r') as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} - ), - ): - yield + mock_http.get.return_value = MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) + yield diff --git a/dcgm/tests/test_unit.py b/dcgm/tests/test_unit.py index 911caad2cdd80..f55b92ca5bcf5 100644 --- a/dcgm/tests/test_unit.py +++ b/dcgm/tests/test_unit.py @@ -17,7 +17,7 @@ def test_critical_service_check(dd_run_check, aggregator, mock_http_response, ch When we can't connect to dcgm-exporter for whatever reason we should only submit a CRITICAL service check. 
""" mock_http_response(status_code=404) - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match="HTTPStatusError"): dd_run_check(check) aggregator.assert_service_check('dcgm.openmetrics.health', status=check.CRITICAL) diff --git a/druid/datadog_checks/druid/druid.py b/druid/datadog_checks/druid/druid.py index e734743abbfee..fbde9d724193d 100644 --- a/druid/datadog_checks/druid/druid.py +++ b/druid/datadog_checks/druid/druid.py @@ -5,6 +5,7 @@ from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.errors import CheckException +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError class DruidCheck(AgentCheck): @@ -64,5 +65,5 @@ def _make_request(self, url): self.warning( "Couldn't connect to URL: %s with exception: %s. Please verify the address is reachable", url, e ) - except requests.exceptions.Timeout as e: + except (requests.exceptions.Timeout, HTTPTimeoutError) as e: self.warning("Connection timeout when connecting to %s: %s", url, e) diff --git a/druid/tests/test_unit.py b/druid/tests/test_unit.py index 3a21281fbc8bc..b6381dc9d4903 100644 --- a/druid/tests/test_unit.py +++ b/druid/tests/test_unit.py @@ -19,17 +19,15 @@ def test_missing_url_config(aggregator): check.check({}) -def test_service_check_can_connect_success(aggregator, instance): +def test_service_check_can_connect_success(aggregator, instance, mock_http): check = DruidCheck('druid', {}, [instance]) - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): - mock_resp = mock.MagicMock(status_code=200) - mock_resp.json.return_value = {'abc': '123'} - req.get.return_value = mock_resp + mock_resp = mock.MagicMock(status_code=200) + mock_resp.json.return_value = {'abc': '123'} + mock_http.get.return_value = mock_resp - resp = check._get_process_properties('http://hello-world.com:8899', ['foo:bar']) - assert resp == {'abc': '123'} + resp = 
check._get_process_properties('http://hello-world.com:8899', ['foo:bar']) + assert resp == {'abc': '123'} aggregator.assert_service_check( 'druid.service.can_connect', @@ -39,17 +37,15 @@ def test_service_check_can_connect_success(aggregator, instance): @pytest.mark.parametrize("exception_class", [requests.exceptions.ConnectionError, requests.exceptions.Timeout]) -def test_service_check_can_connect_failure(aggregator, instance, exception_class): +def test_service_check_can_connect_failure(aggregator, instance, mock_http, exception_class): check = DruidCheck('druid', {}, [instance]) - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): - attrs = {'raise_for_status.side_effect': exception_class} - req.get.side_effect = [mock.MagicMock(status_code=500, **attrs)] + attrs = {'raise_for_status.side_effect': exception_class} + mock_http.get.side_effect = [mock.MagicMock(status_code=500, **attrs)] - with pytest.raises(CheckException): - properties = check._get_process_properties('http://hello-world.com:8899', ['foo:bar']) - assert properties is None + with pytest.raises(CheckException): + properties = check._get_process_properties('http://hello-world.com:8899', ['foo:bar']) + assert properties is None aggregator.assert_service_check( 'druid.service.can_connect', diff --git a/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py b/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py index b18d0bae203b1..bdfb0146bef53 100644 --- a/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py +++ b/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py @@ -10,6 +10,7 @@ from datadog_checks.base import AgentCheck from datadog_checks.base.utils.common import round_value +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError try: from tagger import get_tags @@ -116,7 +117,7 @@ def check(self, _): try: request = self.http.get(metadata_endpoint) - except requests.exceptions.Timeout: + except 
(requests.exceptions.Timeout, HTTPTimeoutError): msg = 'Fargate {} endpoint timed out after {} seconds'.format( metadata_endpoint, self.http.options['timeout'] ) @@ -202,7 +203,7 @@ def check(self, _): try: request = self.http.get(stats_endpoint) - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): msg = 'Fargate {} endpoint timed out after {} seconds'.format(stats_endpoint, self.http.options['timeout']) self.service_check('fargate_check', AgentCheck.WARNING, message=msg, tags=custom_tags) self.log.warning(msg, exc_info=True) diff --git a/ecs_fargate/tests/conftest.py b/ecs_fargate/tests/conftest.py index 8c092686f9825..9176123642a01 100644 --- a/ecs_fargate/tests/conftest.py +++ b/ecs_fargate/tests/conftest.py @@ -6,8 +6,8 @@ import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import get_here -from datadog_checks.dev.http import MockResponse from datadog_checks.ecs_fargate import FargateCheck HERE = get_here() @@ -83,38 +83,38 @@ def mocked_requests_get_linux(*args, **kwargs): # v2 if args[0].endswith("/metadata"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata.json')) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata.json')) elif args[0].endswith("/stats"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', LINUX_STATS_FIXTURE)) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', LINUX_STATS_FIXTURE)) else: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) def mocked_requests_get_linux_v4(*args, **kwargs): if args[0].endswith("/task"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata_v4.json')) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata_v4.json')) elif args[0].endswith("/task/stats"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', LINUX_STATS_FIXTURE_V4)) + return 
MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', LINUX_STATS_FIXTURE_V4)) else: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) def mocked_requests_get_windows(*args, **kwargs): if args[0].endswith("/metadata"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata.json')) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata.json')) elif args[0].endswith("/stats"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', WINDOWS_STATS_FIXTURE)) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', WINDOWS_STATS_FIXTURE)) else: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) def mocked_requests_get_sys_delta(*args, **kwargs): if args[0].endswith("/metadata"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata.json')) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'metadata.json')) elif args[0].endswith("/stats"): - return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'stats_wrong_system_delta.json')) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'stats_wrong_system_delta.json')) else: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) def mocked_get_tags(entity, _): diff --git a/ecs_fargate/tests/test_unit.py b/ecs_fargate/tests/test_unit.py index c4f967166bcec..6406cfc8ce15d 100644 --- a/ecs_fargate/tests/test_unit.py +++ b/ecs_fargate/tests/test_unit.py @@ -8,7 +8,7 @@ import mock import pytest -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.ecs_fargate import FargateCheck from .conftest import ( @@ -42,7 +42,8 @@ def test_failing_check(check, aggregator, dd_run_check): Testing fargate metadata endpoint error. 
""" with mock.patch( - 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', return_value=MockResponse('{}', status_code=500) + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', + return_value=MockHTTPResponse('{}', status_code=500), ): dd_run_check(check) @@ -55,7 +56,8 @@ def test_invalid_response_check(check, aggregator, dd_run_check): Testing invalid fargate metadata payload. """ with mock.patch( - 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', return_value=MockResponse('{}', status_code=200) + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', + return_value=MockHTTPResponse('{}', status_code=200), ): dd_run_check(check) @@ -289,24 +291,9 @@ def test_successful_check_wrong_sys_delta(check, aggregator, dd_run_check): [("explicit timeout", {'timeout': 30}, {'timeout': (30, 30)}), ("default timeout", {}, {'timeout': (5, 5)})], ) @pytest.mark.unit -def test_config(test_case, extra_config, expected_http_kwargs, dd_run_check): +def test_config(test_case, extra_config, expected_http_kwargs): instance = extra_config check = FargateCheck('ecs_fargate', {}, instances=[instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - dd_run_check(check) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('http://169.254.170.2/v2/metadata', **http_wargs) + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value diff --git a/elastic/datadog_checks/elastic/elastic.py b/elastic/datadog_checks/elastic/elastic.py index f4eb44ff0fa30..98a273f18bd81 100644 --- a/elastic/datadog_checks/elastic/elastic.py +++ b/elastic/datadog_checks/elastic/elastic.py @@ -11,6 +11,8 @@ import requests 
from datadog_checks.base import AgentCheck, is_affirmative, to_string +from datadog_checks.base.utils.http_exceptions import HTTPError as AgentHTTPError +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from .config import from_instance from .metrics import ( @@ -164,7 +166,7 @@ def check(self, _): try: pshard_stats_data = self._get_data(pshard_stats_url, send_sc=send_sc) self._process_pshard_stats_data(pshard_stats_data, pshard_stats_metrics, base_tags) - except requests.ReadTimeout as e: + except (requests.ReadTimeout, HTTPTimeoutError) as e: if bubble_ex: raise self.log.warning("Timed out reading pshard-stats from servers (%s) - stats will be missing", e) @@ -189,7 +191,7 @@ def check(self, _): if self._config.index_stats and version >= [1, 0, 0]: try: self._get_index_metrics(admin_forwarder, version, base_tags) - except requests.ReadTimeout as e: + except (requests.ReadTimeout, HTTPTimeoutError) as e: self.log.warning("Timed out reading index stats from servers (%s) - stats will be missing", e) # Load the cat allocation data. 
@@ -275,7 +277,7 @@ def _get_index_metrics(self, admin_forwarder, version, base_tags): def _get_template_metrics(self, admin_forwarder, base_tags): try: template_resp = self._get_data(self._join_url('/_cat/templates?format=json', admin_forwarder)) - except requests.exceptions.RequestException as e: + except (requests.exceptions.RequestException, AgentHTTPError) as e: self.log.debug("Error reading templates info from servers (%s) - template metrics will be missing", e) return @@ -337,7 +339,7 @@ def _get_data(self, url, send_sc=True, data=None): resp.raise_for_status() except Exception as e: # this means we've hit a particular kind of auth error that means the config is broken - if isinstance(resp, requests.Response) and resp.status_code == 400: + if resp is not None and resp.status_code == 400: raise AuthenticationError("The ElasticSearch credentials are incorrect") if send_sc: @@ -492,7 +494,7 @@ def _process_cat_allocation_data(self, admin_forwarder, version, base_tags): cat_allocation_url = self._join_url(self.CAT_ALLOC_PATH, admin_forwarder) try: cat_allocation_data = self._get_data(cat_allocation_url) - except requests.ReadTimeout as e: + except (requests.ReadTimeout, HTTPTimeoutError) as e: self.log.error("Timed out reading cat allocation stats from servers (%s) - stats will be missing", e) return @@ -521,7 +523,7 @@ def _process_cat_allocation_data(self, admin_forwarder, version, base_tags): cat_shards_url = self._join_url(self.CAT_SHARDS_PATH, admin_forwarder) try: cat_shards_data = self._get_data(cat_shards_url) - except requests.ReadTimeout as e: + except (requests.ReadTimeout, HTTPTimeoutError) as e: self.log.error("Timed out reading cat shards stats from servers (%s) - stats will be missing", e) return diff --git a/elastic/tests/test_unit.py b/elastic/tests/test_unit.py index 551133e47a4fd..2e5f2370a0eaf 100644 --- a/elastic/tests/test_unit.py +++ b/elastic/tests/test_unit.py @@ -8,7 +8,7 @@ import pytest from datadog_checks.base import 
ConfigurationError, is_affirmative -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.elastic import ESCheck from datadog_checks.elastic.elastic import AuthenticationError, get_value_from_path from datadog_checks.elastic.metrics import INDEX_STATS_METRICS, stats_for_version @@ -135,7 +135,7 @@ def test_get_template_metrics(aggregator, instance, mock_http_response): def test_get_template_metrics_raise_exception(aggregator, instance): with mock.patch( 'requests.Session.get', - return_value=MockResponse(status_code=403), + return_value=MockHTTPResponse(status_code=403), ): check = ESCheck('elastic', {}, instances=[instance]) # Make sure we do not throw an exception and move on @@ -152,7 +152,7 @@ def test_get_value_from_path(): def test__get_data_throws_authentication_error(instance): with mock.patch( 'requests.Session.get', - return_value=MockResponse(status_code=400), + return_value=MockHTTPResponse(status_code=400), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -163,7 +163,7 @@ def test__get_data_throws_authentication_error(instance): def test__get_data_creates_critical_service_alert(aggregator, instance): with mock.patch( 'requests.Session.get', - return_value=MockResponse(status_code=500), + return_value=MockHTTPResponse(status_code=500), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -174,7 +174,7 @@ def test__get_data_creates_critical_service_alert(aggregator, instance): check.SERVICE_CHECK_CONNECT_NAME, status=check.CRITICAL, tags=check._config.service_check_tags, - message="Error 500 Server Error: None for url: None when hitting test.com", + message="Error 500 Server Error when hitting test.com", ) @@ -194,7 +194,7 @@ def test__get_data_creates_critical_service_alert(aggregator, instance): def test_disable_legacy_sc_tags(aggregator, es_instance): with mock.patch( 'requests.Session.get', - return_value=MockResponse(status_code=500), + 
return_value=MockHTTPResponse(status_code=500), ): check = ESCheck('elastic', {}, instances=[es_instance]) @@ -210,7 +210,7 @@ def test_disable_legacy_sc_tags(aggregator, es_instance): check.SERVICE_CHECK_CONNECT_NAME, status=check.CRITICAL, tags=expected_tags, - message="Error 500 Server Error: None for url: None when hitting test.com", + message="Error 500 Server Error when hitting test.com", ) diff --git a/envoy/datadog_checks/envoy/envoy.py b/envoy/datadog_checks/envoy/envoy.py index 4790da50656fe..d335b5e548a42 100644 --- a/envoy/datadog_checks/envoy/envoy.py +++ b/envoy/datadog_checks/envoy/envoy.py @@ -8,6 +8,7 @@ import requests from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from .check import EnvoyCheckV2 from .errors import UnknownMetric, UnknownTags @@ -90,7 +91,7 @@ def check(self, _): try: response = self.http.get(self.stats_url) - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): timeout = self.http.options['timeout'] msg = 'Envoy endpoint `{}` timed out after {} seconds'.format(self.stats_url, timeout) self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, message=msg, tags=self.custom_tags) diff --git a/envoy/datadog_checks/envoy/utils.py b/envoy/datadog_checks/envoy/utils.py index 88b7f4233e7c9..f0428abd8b1dd 100644 --- a/envoy/datadog_checks/envoy/utils.py +++ b/envoy/datadog_checks/envoy/utils.py @@ -6,6 +6,8 @@ import requests +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError + LEGACY_VERSION_RE = re.compile(r'/(\d\.\d\.\d)/') @@ -77,7 +79,7 @@ def _get_server_info(server_info_url, log, http): log.debug('Version not matched.') return - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): log.warning('Envoy endpoint `%s` timed out after %s seconds', server_info_url, http.options['timeout']) return None except Exception as e: diff 
--git a/envoy/tests/legacy/test_unit.py b/envoy/tests/legacy/test_unit.py index b43b531857fc6..cdcad5ba85923 100644 --- a/envoy/tests/legacy/test_unit.py +++ b/envoy/tests/legacy/test_unit.py @@ -18,7 +18,6 @@ EXT_AUTHZ_METRICS, EXT_PROC_METRICS, FLAVOR, - HOST, INSTANCES, LOCAL_RATE_LIMIT_METRICS, RATE_LIMIT_STAT_PREFIX_TAG, @@ -135,28 +134,13 @@ def test_unknown(fixture_path, mock_http_response, dd_run_check, check): pytest.param({}, {'verify': True}, id="legacy ssl config unset"), ], ) -def test_config(extra_config, expected_http_kwargs, check, dd_run_check): +def test_config(extra_config, expected_http_kwargs, check): instance = deepcopy(INSTANCES['main']) instance.update(extra_config) check = check(instance) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - dd_run_check(check) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('http://{}:8001/stats'.format(HOST), **http_wargs) + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value @pytest.mark.parametrize( diff --git a/etcd/tests/test_integration.py b/etcd/tests/test_integration.py index da94be9a527c3..b5387de0d9467 100644 --- a/etcd/tests/test_integration.py +++ b/etcd/tests/test_integration.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from copy import deepcopy -import mock import pytest from datadog_checks.etcd import Etcd @@ -63,41 +62,6 @@ def test_service_check(aggregator, instance, dd_run_check): aggregator.assert_service_check('etcd.prometheus.health', Etcd.OK, tags=tags, count=1) -@pytest.mark.parametrize( - 'test_case, extra_config, expected_http_kwargs', - [ - ("new auth config", {'username': 'new_foo', 'password': 
'new_bar'}, {'auth': ('new_foo', 'new_bar')}), - ("legacy ssl config True", {'ssl_verify': True}, {'verify': True}), - ("legacy ssl config False", {'ssl_verify': False}, {'verify': False}), - ("legacy ssl config unset", {}, {'verify': False}), - ("timeout", {'prometheus_timeout': 100}, {'timeout': (100.0, 100.0)}), - ], -) -@pytest.mark.integration -def test_config(instance, test_case, extra_config, expected_http_kwargs, dd_run_check): - instance.update(extra_config) - check = Etcd(CHECK_NAME, {}, [instance]) - - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - dd_run_check(check) - - http_kwargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'data': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_kwargs.update(expected_http_kwargs) - r.post.assert_called_with(URL + '/v3/maintenance/status', **http_kwargs) - - @pytest.mark.integration def test_version_metadata(aggregator, instance, dd_environment, datadog_agent, dd_run_check): check_instance = Etcd(CHECK_NAME, {}, [instance]) diff --git a/etcd/tests/test_unit.py b/etcd/tests/test_unit.py new file mode 100644 index 0000000000000..5987830c6efeb --- /dev/null +++ b/etcd/tests/test_unit.py @@ -0,0 +1,26 @@ +# (C) Datadog, Inc. 
2026-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import pytest + +from datadog_checks.etcd import Etcd + +CHECK_NAME = 'etcd' + + +@pytest.mark.parametrize( + 'test_case, extra_config, expected_http_kwargs', + [ + ("new auth config", {'username': 'new_foo', 'password': 'new_bar'}, {'auth': ('new_foo', 'new_bar')}), + ("legacy ssl config True", {'ssl_verify': True}, {'verify': True}), + ("legacy ssl config False", {'ssl_verify': False}, {'verify': False}), + ("legacy ssl config unset", {}, {'verify': False}), + ("timeout", {'prometheus_timeout': 100}, {'timeout': (100.0, 100.0)}), + ], +) +def test_config(instance, test_case, extra_config, expected_http_kwargs): + instance.update(extra_config) + check = Etcd(CHECK_NAME, {}, [instance]) + + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value diff --git a/external_dns/tests/conftest.py b/external_dns/tests/conftest.py index 659cb475e7f29..f7fa24463b583 100644 --- a/external_dns/tests/conftest.py +++ b/external_dns/tests/conftest.py @@ -4,27 +4,23 @@ import os from copy import deepcopy -import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse + from .common import FIXTURE_DIR INSTANCE = {'prometheus_url': 'http://localhost:7979/metrics', 'tags': ['custom:tag']} @pytest.fixture -def mock_external_dns(): +def mock_external_dns(mock_openmetrics_http): f_name = os.path.join(FIXTURE_DIR, 'metrics.txt') with open(f_name, 'r') as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split('\n'), headers={'Content-Type': 'text/plain'} - ), - ): - yield + mock_openmetrics_http.get.return_value = MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) + yield @pytest.fixture(scope='session') diff --git a/falco/tests/test_unit.py b/falco/tests/test_unit.py index 
c2a544b1f780a..fdb03f05bbc30 100644 --- a/falco/tests/test_unit.py +++ b/falco/tests/test_unit.py @@ -1,12 +1,10 @@ # (C) Datadog, Inc. 2025-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from unittest import mock - import pytest from datadog_checks.base.constants import ServiceCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.falco import FalcoCheck @@ -22,13 +20,11 @@ def test_empty_instance(dd_run_check): dd_run_check(check) -def test_check_falco(dd_run_check, aggregator, instance): - mock_responses = [ - MockResponse(file_path=get_fixture_path("falco_metrics.txt")), +def test_check_falco(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = [ + MockHTTPResponse(file_path=get_fixture_path("falco_metrics.txt")), ] - - with mock.patch('requests.Session.get', side_effect=mock_responses): - dd_run_check(FalcoCheck('falco', {}, [instance])) + dd_run_check(FalcoCheck('falco', {}, [instance])) for metric in METRICS: aggregator.assert_metric(metric) diff --git a/fluxcd/tests/conftest.py b/fluxcd/tests/conftest.py index 55a944c2d90bd..f4f4a6875f515 100644 --- a/fluxcd/tests/conftest.py +++ b/fluxcd/tests/conftest.py @@ -3,10 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os from contextlib import ExitStack -from unittest import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import get_here from datadog_checks.dev.kind import kind_run from datadog_checks.dev.kube_port_forward import port_forward @@ -69,36 +69,22 @@ def check(instance): @pytest.fixture() -def mock_metrics_v1(): +def mock_metrics_v1(mock_http): fixture_file = os.path.join(os.path.dirname(__file__), "fixtures", "metrics-v1.txt") with open(fixture_file, "r") as f: content = f.read() - with 
mock.patch( - "requests.Session.get", - return_value=mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: content.split("\n"), - headers={"Content-Type": "text/plain"}, - ), - ): - yield + mock_http.get.return_value = MockHTTPResponse(content=content, headers={"Content-Type": "text/plain"}) + yield @pytest.fixture() -def mock_metrics_v2(): +def mock_metrics_v2(mock_http): fixture_file = os.path.join(os.path.dirname(__file__), "fixtures", "metrics-v2.txt") with open(fixture_file, "r") as f: content = f.read() - with mock.patch( - "requests.Session.get", - return_value=mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: content.split("\n"), - headers={"Content-Type": "text/plain"}, - ), - ): - yield + mock_http.get.return_value = MockHTTPResponse(content=content, headers={"Content-Type": "text/plain"}) + yield diff --git a/fly_io/datadog_checks/fly_io/errors.py b/fly_io/datadog_checks/fly_io/errors.py index 62341bb7f2730..5732ec5d359d2 100644 --- a/fly_io/datadog_checks/fly_io/errors.py +++ b/fly_io/datadog_checks/fly_io/errors.py @@ -5,6 +5,8 @@ import requests +from datadog_checks.base.utils.http_exceptions import HTTPRequestError, HTTPStatusError + def handle_error(f): @wraps(f) @@ -12,7 +14,7 @@ def wrapper(check, *args, **kwargs): try: result = f(check, *args, **kwargs) return result - except requests.exceptions.RequestException as e: + except (requests.exceptions.RequestException, HTTPRequestError, HTTPStatusError) as e: check.log.debug( "Encountered a RequestException in '%s' [%s]: %s", f.__name__, diff --git a/fly_io/tests/conftest.py b/fly_io/tests/conftest.py index 959a905ee23dd..6de661bd9eac3 100644 --- a/fly_io/tests/conftest.py +++ b/fly_io/tests/conftest.py @@ -7,14 +7,13 @@ from pathlib import Path from urllib.parse import urlparse -import mock import pytest -import requests +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_testing import MockHTTPResponse from 
datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints from datadog_checks.dev.fs import get_here -from datadog_checks.dev.http import MockResponse from .common import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_FLY_LAB @@ -98,17 +97,13 @@ def call(method, url, file='response', headers=None, params=None): response = mock_responses(method, url, file=file, headers=headers, params=params) if response is not None: return response - http_response = requests.models.Response() - http_response.status_code = 404 - http_response.reason = "Not Found" - http_response.url = url - raise requests.exceptions.HTTPError(response=http_response) + raise HTTPStatusError('404 Client Error', response=MockHTTPResponse(status_code=404, url=url)) yield call @pytest.fixture -def mock_http_get(request, monkeypatch, mock_http_call): +def mock_http_get(request, mock_http, mock_http_call): param = request.param if hasattr(request, 'param') and request.param is not None else {} http_error = param.pop('http_error', {}) @@ -117,15 +112,13 @@ def get(url, *args, **kwargs): url = get_url_path(url) if http_error and url in http_error: return http_error[url] - mock_status_code = mock.MagicMock(return_value=200) if "/metrics" in url: filepath = os.path.join(get_here(), 'fixtures', 'output.txt') - return MockResponse(file_path=filepath) + return MockHTTPResponse(file_path=filepath) headers = kwargs.get('headers') params = kwargs.get('params') - mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) - return mock.MagicMock(json=mock_json, status_code=mock_status_code) + json_data = mock_http_call(method, url, headers=headers, params=params) + return MockHTTPResponse(json_data=json_data) - mock_get = mock.MagicMock(side_effect=get) - monkeypatch.setattr('requests.Session.get', mock_get) - return mock_get + mock_http.get.side_effect = get + return mock_http.get diff --git a/fly_io/tests/test_unit.py 
b/fly_io/tests/test_unit.py index 6098d20713619..4917810f7feb2 100644 --- a/fly_io/tests/test_unit.py +++ b/fly_io/tests/test_unit.py @@ -8,7 +8,7 @@ import pytest from datadog_checks.base.constants import ServiceCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.fly_io import FlyIoCheck @@ -94,11 +94,11 @@ def test_rest_api_app_metrics(dd_run_check, aggregator, instance, caplog): ('mock_http_get'), [ pytest.param( - {'http_error': {'/v1/apps': MockResponse(status_code=500)}}, + {'http_error': {'/v1/apps': MockHTTPResponse(status_code=500)}}, id='500', ), pytest.param( - {'http_error': {'/v1/apps': MockResponse(status_code=404)}}, + {'http_error': {'/v1/apps': MockHTTPResponse(status_code=404)}}, id='404', ), ], @@ -107,7 +107,7 @@ def test_rest_api_app_metrics(dd_run_check, aggregator, instance, caplog): @pytest.mark.usefixtures('mock_http_get') def test_rest_api_exception(dd_run_check, instance, aggregator): check = FlyIoCheck('fly_io', {}, [instance]) - with pytest.raises(Exception, match=r'requests.exceptions.HTTPError'): + with pytest.raises(Exception, match=r'HTTPStatusError'): dd_run_check(check) aggregator.assert_metric("fly_io.machines_api.up", value=0) @@ -124,7 +124,9 @@ def test_rest_api_exception(dd_run_check, instance, aggregator): pytest.param( { 'http_error': { - '/v1/apps/example-app-1/machines': MockResponse(json_data=[{'state': 'started', 'config': None}]) + '/v1/apps/example-app-1/machines': MockHTTPResponse( + json_data=[{'state': 'started', 'config': None}] + ) } }, id='malformed response', @@ -156,7 +158,7 @@ def test_bad_response_exception(dd_run_check, instance, aggregator, caplog): ('mock_http_get'), [ pytest.param( - {'http_error': {'/v1/apps/example-app-1/volumes': MockResponse(status_code=404)}}, + {'http_error': {'/v1/apps/example-app-1/volumes': MockHTTPResponse(status_code=404)}}, 
id='http error', ), ], @@ -169,8 +171,9 @@ def test_http_error_exception(dd_run_check, instance, aggregator, caplog): dd_run_check(check) assert ( - "Encountered a RequestException in '_collect_volumes_for_app' []: " - "404 Client Error: None for url: None" in caplog.text + "Encountered a RequestException in '_collect_volumes_for_app'" + " []: " + "404 Client Error" in caplog.text ) for metric in MOCKED_PROMETHEUS_METRICS: @@ -218,20 +221,28 @@ def test_external_host_tags(instance, datadog_agent, dd_run_check): ('mock_http_get, log_lines'), [ pytest.param( - {'http_error': {'/v1/apps/example-app-2': MockResponse(status_code=404)}}, - ['RequestException in \'_get_app_status\' []: 404'], + {'http_error': {'/v1/apps/example-app-2': MockHTTPResponse(status_code=404)}}, + [ + "RequestException in '_get_app_status'" + " []:" + " 404 Client Error" + ], id='one app', ), pytest.param( { 'http_error': { - '/v1/apps/example-app-1': MockResponse(status_code=404), - '/v1/apps/example-app-2': MockResponse(status_code=500), + '/v1/apps/example-app-1': MockHTTPResponse(status_code=404), + '/v1/apps/example-app-2': MockHTTPResponse(status_code=500), } }, [ - 'RequestException in \'_get_app_status\' []: 404', - 'RequestException in \'_get_app_status\' []: 500', + "RequestException in '_get_app_status'" + " []:" + " 404 Client Error", + "RequestException in '_get_app_status'" + " []:" + " 500 Server Error", ], id='two apps', ), diff --git a/gitlab/datadog_checks/gitlab/gitlab.py b/gitlab/datadog_checks/gitlab/gitlab.py index 0346b2f9f5532..d363b7abfc5cb 100644 --- a/gitlab/datadog_checks/gitlab/gitlab.py +++ b/gitlab/datadog_checks/gitlab/gitlab.py @@ -8,6 +8,7 @@ from datadog_checks.base import AgentCheck from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.base.errors import CheckException +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from .common import get_gitlab_version, get_tags from .gitlab_v2 import 
GitlabCheckV2 @@ -152,7 +153,7 @@ def _check_health_endpoint(self, check_type): else: r.raise_for_status() - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): # If there's a timeout self.service_check( service_check_name, diff --git a/gitlab/datadog_checks/gitlab/gitlab_v2.py b/gitlab/datadog_checks/gitlab/gitlab_v2.py index 8febf5710b6de..dfb7191e9c270 100644 --- a/gitlab/datadog_checks/gitlab/gitlab_v2.py +++ b/gitlab/datadog_checks/gitlab/gitlab_v2.py @@ -8,6 +8,7 @@ from datadog_checks.base import AgentCheck, OpenMetricsBaseCheckV2 from datadog_checks.base.checks.openmetrics.v2.scraper import OpenMetricsCompatibilityScraper +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from datadog_checks.gitlab.config_models import ConfigMixin from .common import get_gitlab_version, get_tags @@ -102,7 +103,7 @@ def _check_health_endpoint(self, check_type, extra_params=None, response_handler ) else: self.service_check(service_check_name, OpenMetricsBaseCheckV2.OK, self._tags) - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): self.service_check( service_check_name, OpenMetricsBaseCheckV2.CRITICAL, diff --git a/gitlab/tests/conftest.py b/gitlab/tests/conftest.py index b7d24e38eadae..a7148e0d9c07d 100644 --- a/gitlab/tests/conftest.py +++ b/gitlab/tests/conftest.py @@ -7,10 +7,10 @@ from contextlib import contextmanager from time import sleep -import mock import pytest import requests +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import EnvVars, TempDir, docker_run from datadog_checks.dev._env import get_state, save_state from datadog_checks.dev.conditions import CheckEndpoints @@ -102,12 +102,9 @@ def dd_environment(): @pytest.fixture() -def mock_data(): - with mock.patch( - 'requests.Session.get', - side_effect=mocked_requests_get, - ): - yield +def mock_data(mock_openmetrics_http): + 
mock_openmetrics_http.get.side_effect = mocked_requests_get + yield def mocked_requests_get(*args, **kwargs): @@ -117,45 +114,29 @@ def mocked_requests_get(*args, **kwargs): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'readiness_check.json') with open(f_name, 'r') as f: text_data = f.read() - response = mock.MagicMock() - response.status_code = 200 - response.json.return_value = json.loads(text_data) - return response + return MockHTTPResponse(json_data=json.loads(text_data)) elif url == "http://{}:{}/-/liveness".format(HOST, GITLAB_LOCAL_PORT) or url == "http://{}:{}/-/health".format( HOST, GITLAB_LOCAL_PORT ): - response = mock.MagicMock() - response.status_code = 200 - return response + return MockHTTPResponse() elif url == "http://{}:{}/-/metrics".format(HOST, GITLAB_LOCAL_PORT): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'metrics.txt') with open(f_name, 'r') as f: text_data = f.read() - return mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: text_data.split("\n"), - headers={'Content-Type': "text/plain"}, - ) + return MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) elif url == "http://{}:{}/metrics".format(HOST, GITLAB_LOCAL_GITALY_PROMETHEUS_PORT): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'gitaly.txt') with open(f_name, 'r') as f: text_data = f.read() - return mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: text_data.split("\n"), - headers={'Content-Type': "text/plain"}, - ) + return MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) elif url == "http://{}:{}/api/v4/version".format(HOST, GITLAB_LOCAL_PORT): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'version.json') with open(f_name, 'r') as f: text_data = f.read() - response = mock.MagicMock() - response.status_code = 200 - response.json.return_value = json.loads(text_data) - return response + return MockHTTPResponse(json_data=json.loads(text_data)) 
pytest.fail("url `{}` not registered".format(args[0])) diff --git a/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py b/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py index fa9a04f93006b..bf1be5f6b5c6d 100644 --- a/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py +++ b/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py @@ -9,6 +9,7 @@ from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.base.errors import CheckException +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from .metrics import METRICS_LIST @@ -135,7 +136,7 @@ def _check_connectivity_to_master(self, instance, tags): else: r.raise_for_status() - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): # If there's a timeout self.service_check( self.MASTER_SERVICE_CHECK_NAME, diff --git a/gitlab_runner/tests/conftest.py b/gitlab_runner/tests/conftest.py index ec93ffbd16807..8391bf121ee9f 100644 --- a/gitlab_runner/tests/conftest.py +++ b/gitlab_runner/tests/conftest.py @@ -4,9 +4,9 @@ import os -import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints @@ -68,21 +68,14 @@ def _mocked_requests_get(*args, **kwargs): fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'metrics.txt') with open(fixtures_path, 'r') as f: text_data = f.read() - return mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: text_data.split("\n"), - headers={'Content-Type': "text/plain"}, - ) + return MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) elif url == 'http://{}:{}/ci'.format(HOST, GITLAB_LOCAL_MASTER_PORT): - return mock.MagicMock(status_code=200) + return MockHTTPResponse() - return mock.MagicMock(status_code=404) + return MockHTTPResponse(status_code=404) @pytest.fixture() -def 
mock_data(): - with mock.patch( - 'requests.Session.get', - side_effect=_mocked_requests_get, - ): - yield +def mock_data(mock_openmetrics_http): + mock_openmetrics_http.get.side_effect = _mocked_requests_get + yield diff --git a/gitlab_runner/tests/test_unit.py b/gitlab_runner/tests/test_unit.py index cc72d9a5ad861..35b89bc2109d2 100644 --- a/gitlab_runner/tests/test_unit.py +++ b/gitlab_runner/tests/test_unit.py @@ -4,7 +4,6 @@ from copy import deepcopy -import mock import pytest from datadog_checks.dev.utils import get_metadata_metrics @@ -29,22 +28,7 @@ def test_timeout(test_case, timeout_config, expected_timeout): gitlab_runner = GitlabRunnerCheck('gitlab_runner', common.CONFIG['init_config'], instances=config['instances']) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - gitlab_runner.check(config['instances'][0]) - - r.get.assert_called_with( - 'http://localhost:8085/ci', - auth=mock.ANY, - cert=mock.ANY, - headers=mock.ANY, - proxies=mock.ANY, - timeout=expected_timeout, - verify=mock.ANY, - allow_redirects=mock.ANY, - ) + assert gitlab_runner.http.options['timeout'] == expected_timeout @pytest.mark.unit diff --git a/haproxy/tests/conftest.py b/haproxy/tests/conftest.py index b32c6c12f585d..c114af9ec6c39 100644 --- a/haproxy/tests/conftest.py +++ b/haproxy/tests/conftest.py @@ -14,6 +14,7 @@ import requests from packaging import version +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import TempDir, WaitFor, docker_run from datadog_checks.haproxy import HAProxyCheck from datadog_checks.haproxy.metrics import METRIC_MAP @@ -204,14 +205,13 @@ def instancev2(): return instance -@pytest.fixture(scope="module") -def haproxy_mock(): +@pytest.fixture +def haproxy_mock(mock_http): filepath = os.path.join(HERE, 'fixtures', 'mock_data') with open(filepath, 'rb') as f: data = f.read() - p = 
mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) - yield p.start() - p.stop() + mock_http.get.return_value = MockHTTPResponse(content=data) + yield @pytest.fixture(scope="module") @@ -222,23 +222,22 @@ def mock_data(): return data.split('\n') -@pytest.fixture(scope="module") -def haproxy_mock_evil(): +@pytest.fixture +def haproxy_mock_evil(mock_http): filepath = os.path.join(HERE, 'fixtures', 'mock_data_evil') with open(filepath, 'rb') as f: data = f.read() - p = mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) - yield p.start() - p.stop() + mock_http.get.return_value = MockHTTPResponse(content=data) + yield -@pytest.fixture(scope="module") -def haproxy_mock_enterprise_version_info(): +@pytest.fixture +def haproxy_mock_enterprise_version_info(mock_http): filepath = os.path.join(HERE, 'fixtures', 'enterprise_version_info.html') with open(filepath, 'rb') as f: data = f.read() - with mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) as p: - yield p + mock_http.get.return_value = MockHTTPResponse(content=data) + yield @pytest.fixture(scope="session") diff --git a/harbor/datadog_checks/harbor/harbor.py b/harbor/datadog_checks/harbor/harbor.py index f51e2620bae65..c99413a682db5 100644 --- a/harbor/datadog_checks/harbor/harbor.py +++ b/harbor/datadog_checks/harbor/harbor.py @@ -4,6 +4,7 @@ from requests import HTTPError from datadog_checks.base import AgentCheck +from datadog_checks.base.utils.http_exceptions import HTTPStatusError from .api import HarborAPI from .common import HEALTHY @@ -29,8 +30,8 @@ def _check_registries_health(self, api, base_tags): try: registries = api.registries() self.log.debug("Found %d registries", len(registries)) - except HTTPError as e: - if e.response.status_code in (401, 403): + except (HTTPError, HTTPStatusError) as e: + if e.response is not None and e.response.status_code in (401, 403): # Forbidden, user is not admin self.log.info( "Provided user in harbor integration 
config is not an admin user. Ignoring registries health checks" @@ -49,7 +50,7 @@ def _check_registries_health(self, api, base_tags): try: api.registry_health(registry['id']) self.service_check(REGISTRY_STATUS, AgentCheck.OK, tags=tags) - except HTTPError as e: + except (HTTPError, HTTPStatusError) as e: self.log.debug(e, exc_info=True) self.service_check(REGISTRY_STATUS, AgentCheck.CRITICAL, tags=tags) @@ -61,8 +62,8 @@ def _submit_project_metrics(self, api, base_tags): def _submit_disk_metrics(self, api, base_tags): try: volume_info = api.volume_info() - except HTTPError as e: - if e.response.status_code in (401, 403): + except (HTTPError, HTTPStatusError) as e: + if e.response is not None and e.response.status_code in (401, 403): # Forbidden, user is not admin self.log.warning( "Provided user in harbor integration config is not an admin user. Ignoring volume metrics" diff --git a/harbor/tests/conftest.py b/harbor/tests/conftest.py index 632ef1b7571e6..f0f83528653ad 100644 --- a/harbor/tests/conftest.py +++ b/harbor/tests/conftest.py @@ -5,11 +5,10 @@ import pytest import requests -from mock import patch +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, WaitFor -from datadog_checks.dev.http import MockResponse from datadog_checks.harbor import HarborCheck from datadog_checks.harbor.api import HarborAPI from datadog_checks.harbor.common import ( @@ -104,9 +103,10 @@ def harbor_api(harbor_check, admin_instance, patch_requests): @pytest.fixture -def patch_requests(): - with patch("requests.Session.request", side_effect=mocked_requests): - yield +def patch_requests(mock_http): + mock_http.get.side_effect = mocked_requests + mock_http.post.side_effect = mocked_requests + yield def get_docker_compose_file(): @@ -115,7 +115,7 @@ def get_docker_compose_file(): return os.path.join(HERE, 'compose', harbor_folder, 'docker-compose.yml') -def 
mocked_requests(_, url, **kwargs): +def mocked_requests(url, **kwargs): def match(url, *candidates_url): for c in candidates_url: if url == c.format(base_url=URL): @@ -123,24 +123,24 @@ def match(url, *candidates_url): return False if match(url, LOGIN_URL): - return MockResponse() + return MockHTTPResponse() elif match(url, HEALTH_URL): - return MockResponse(json_data=HEALTH_FIXTURE) + return MockHTTPResponse(json_data=HEALTH_FIXTURE) elif match(url, PING_URL): - return MockResponse('Pong') + return MockHTTPResponse('Pong') elif match(url, CHARTREPO_HEALTH_URL): - return MockResponse(json_data=CHARTREPO_HEALTH_FIXTURE) + return MockHTTPResponse(json_data=CHARTREPO_HEALTH_FIXTURE) elif match(url, PROJECTS_URL): - return MockResponse(json_data=PROJECTS_FIXTURE) + return MockHTTPResponse(json_data=PROJECTS_FIXTURE) elif match(url, REGISTRIES_URL): - return MockResponse(json_data=REGISTRIES_FIXTURE) + return MockHTTPResponse(json_data=REGISTRIES_FIXTURE) elif match(url, REGISTRIES_PING_URL): - return MockResponse() + return MockHTTPResponse() elif match(url, VOLUME_INFO_URL): if HARBOR_VERSION < VERSION_2_2: - return MockResponse(json_data=VOLUME_INFO_PRE_2_2_FIXTURE) - return MockResponse(json_data=VOLUME_INFO_FIXTURE) + return MockHTTPResponse(json_data=VOLUME_INFO_PRE_2_2_FIXTURE) + return MockHTTPResponse(json_data=VOLUME_INFO_FIXTURE) elif match(url, SYSTEM_INFO_URL): - return MockResponse(json_data=SYSTEM_INFO_FIXTURE) + return MockHTTPResponse(json_data=SYSTEM_INFO_FIXTURE) - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) diff --git a/harbor/tests/test_unit.py b/harbor/tests/test_unit.py index 92b588db4fd04..86dbd046db17e 100644 --- a/harbor/tests/test_unit.py +++ b/harbor/tests/test_unit.py @@ -3,10 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import pytest from mock import MagicMock -from requests import HTTPError from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import 
MockResponse +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_testing import MockHTTPResponse from .common import HARBOR_COMPONENTS @@ -53,11 +53,11 @@ def test_submit_read_only_status(aggregator, harbor_check, harbor_api): def test_api__make_get_request(harbor_api): harbor_api.http = MagicMock() - harbor_api.http.get = MagicMock(return_value=MockResponse(json_data={'json': True})) + harbor_api.http.get = MagicMock(return_value=MockHTTPResponse(json_data={'json': True})) assert harbor_api._make_get_request('{base_url}/api/path') == {"json": True} - harbor_api.http.get = MagicMock(return_value=MockResponse(status_code=500)) - with pytest.raises(HTTPError): + harbor_api.http.get = MagicMock(return_value=MockHTTPResponse(status_code=500)) + with pytest.raises(HTTPStatusError): harbor_api._make_get_request('{base_url}/api/path') @@ -66,7 +66,9 @@ def test_api__make_paginated_get_request(harbor_api): paginated_result = [[expected_result[i], expected_result[i + 1]] for i in range(0, len(expected_result) - 1, 2)] values = [] for r in paginated_result: - values.append(MockResponse(json_data=r, headers={'link': 'Link: ; rel=next; type="text/plain"'})) + values.append( + MockHTTPResponse(json_data=r, headers={'link': 'Link: ; rel=next; type="text/plain"'}) + ) values[-1].headers.pop('link') harbor_api.http = MagicMock() @@ -77,9 +79,9 @@ def test_api__make_paginated_get_request(harbor_api): def test_api__make_post_request(harbor_api): harbor_api.http = MagicMock() - harbor_api.http.post = MagicMock(return_value=MockResponse(json_data={'json': True})) + harbor_api.http.post = MagicMock(return_value=MockHTTPResponse(json_data={'json': True})) assert harbor_api._make_post_request('{base_url}/api/path') == {"json": True} - harbor_api.http.post = MagicMock(return_value=MockResponse(status_code=500)) - with pytest.raises(HTTPError): + harbor_api.http.post = MagicMock(return_value=MockHTTPResponse(status_code=500)) + with 
pytest.raises(HTTPStatusError): harbor_api._make_post_request('{base_url}/api/path') diff --git a/hdfs_datanode/tests/conftest.py b/hdfs_datanode/tests/conftest.py index d8248b94a20a1..8017fb19a6de0 100644 --- a/hdfs_datanode/tests/conftest.py +++ b/hdfs_datanode/tests/conftest.py @@ -6,13 +6,12 @@ from copy import deepcopy import pytest -from mock import patch +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run -from datadog_checks.dev.http import MockResponse from datadog_checks.hdfs_datanode import HDFSDataNode -from .common import FIXTURE_DIR, HERE, INSTANCE_INTEGRATION, TEST_PASSWORD, TEST_USERNAME +from .common import FIXTURE_DIR, HERE, INSTANCE_INTEGRATION @pytest.fixture(scope="session") @@ -36,37 +35,26 @@ def instance(): @pytest.fixture -def mocked_request(): - with patch('requests.Session.get', new=requests_get_mock): - yield +def mocked_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield @pytest.fixture -def mocked_metadata_request(): - with patch('requests.Session.get', new=requests_metadata_mock): - yield +def mocked_metadata_request(mock_http): + mock_http.get.side_effect = requests_metadata_mock + yield @pytest.fixture -def mocked_auth_request(): - with patch('requests.Session.get', new=requests_auth_mock): - yield +def mocked_auth_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield -def requests_get_mock(*args, **kwargs): - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_datanode_jmx.json')) +def requests_get_mock(url, *args, **kwargs): + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_datanode_jmx.json')) -def requests_metadata_mock(*args, **kwargs): - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_datanode_info_jmx.json')) - - -def requests_auth_mock(*args, **kwargs): - # Make sure we're passing in authentication - assert 'auth' in kwargs, "Error, missing authentication" - - # Make sure we've 
got the correct username and password - assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" - - # Return mocked request.get(...) - return requests_get_mock(*args, **kwargs) +def requests_metadata_mock(url, *args, **kwargs): + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_datanode_info_jmx.json')) diff --git a/hdfs_namenode/tests/conftest.py b/hdfs_namenode/tests/conftest.py index f6a0da366dfab..fae3b8bd6f61c 100644 --- a/hdfs_namenode/tests/conftest.py +++ b/hdfs_namenode/tests/conftest.py @@ -4,10 +4,9 @@ import os import pytest -from mock import patch +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run -from datadog_checks.dev.http import MockResponse from datadog_checks.hdfs_namenode import HDFSNameNode from .common import ( @@ -17,8 +16,6 @@ NAME_SYSTEM_METADATA_URL, NAME_SYSTEM_STATE_URL, NAME_SYSTEM_URL, - TEST_PASSWORD, - TEST_USERNAME, ) @@ -43,32 +40,21 @@ def check(): @pytest.fixture -def mocked_request(): - with patch("requests.Session.get", new=requests_get_mock): - yield +def mocked_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield @pytest.fixture -def mocked_auth_request(): - with patch("requests.Session.get", new=requests_auth_mock): - yield +def mocked_auth_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield -def requests_get_mock(session, url, *args, **kwargs): +def requests_get_mock(url, *args, **kwargs): if url == NAME_SYSTEM_STATE_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem_state.json')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem_state.json')) elif url == NAME_SYSTEM_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem.json')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem.json')) elif url == NAME_SYSTEM_METADATA_URL: - return 
MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem_info.json')) - - -def requests_auth_mock(*args, **kwargs): - # Make sure we're passing in authentication - assert 'auth' in kwargs, "Error, missing authentication" - - # Make sure we've got the correct username and password - assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" - - # Return mocked request.get(...) - return requests_get_mock(*args, **kwargs) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem_info.json')) diff --git a/impala/tests/conftest.py b/impala/tests/conftest.py index fed60c3fed3bf..86f818e299bfa 100644 --- a/impala/tests/conftest.py +++ b/impala/tests/conftest.py @@ -3,10 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os from contextlib import ExitStack, contextmanager -from unittest import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import EnvVars, TempDir, docker_run, get_docker_hostname, get_here from datadog_checks.dev._env import get_state, save_state from datadog_checks.dev.conditions import CheckEndpoints @@ -92,22 +92,15 @@ def catalog_check(catalog_instance): @pytest.fixture() -def mock_metrics(request): +def mock_metrics(request, mock_http): metrics_file = request.node.get_closest_marker("metrics_file") with open( os.path.join(os.path.dirname(__file__), "fixtures", metrics_file.args[0], metrics_file.args[1]), "r" ) as fixture_file: content = fixture_file.read() - with mock.patch( - "requests.Session.get", - return_value=mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: content.split("\n"), - headers={"Content-Type": "text/plain"}, - ), - ): - yield + mock_http.get.return_value = MockHTTPResponse(content=content, headers={"Content-Type": "text/plain"}) + yield @contextmanager diff --git a/kubelet/tests/test_kubelet.py b/kubelet/tests/test_kubelet.py index 6ffe23ceaaeb7..459de309f5199 100644 --- 
a/kubelet/tests/test_kubelet.py +++ b/kubelet/tests/test_kubelet.py @@ -15,7 +15,7 @@ from datadog_checks.base.checks.kubelet_base.base import KubeletCredentials from datadog_checks.base.errors import SkipInstanceError from datadog_checks.base.utils.date import parse_rfc3339 -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.kubelet import KubeletCheck, PodListUtils # Skip the whole tests module on Windows @@ -857,7 +857,7 @@ def test_report_container_state_metrics(monkeypatch, tagger): monkeypatch.setattr( check, 'perform_kubelet_query', - mock.Mock(return_value=MockResponse(file_path=os.path.join(HERE, 'fixtures', 'pods_crashed.json'))), + mock.Mock(return_value=MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'pods_crashed.json'))), ) monkeypatch.setattr(check, 'compute_pod_expiration_datetime', mock.Mock(return_value=None)) monkeypatch.setattr(check, 'gauge', mock.Mock()) @@ -1010,7 +1010,7 @@ def test_pod_expiration(monkeypatch, aggregator, tagger): monkeypatch.setattr( check, 'perform_kubelet_query', - mock.Mock(return_value=MockResponse(file_path=os.path.join(HERE, 'fixtures', 'pods_expired.json'))), + mock.Mock(return_value=MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'pods_expired.json'))), ) monkeypatch.setattr( check, 'compute_pod_expiration_datetime', mock.Mock(return_value=parse_rfc3339("2019-02-18T16:00:06Z")) diff --git a/kubevirt_api/tests/conftest.py b/kubevirt_api/tests/conftest.py index 8556cf4d3956d..81af61164d28d 100644 --- a/kubevirt_api/tests/conftest.py +++ b/kubevirt_api/tests/conftest.py @@ -7,8 +7,8 @@ import pytest import yaml +from datadog_checks.base.utils.http_testing import MockHTTPResponse # noqa: F401 from datadog_checks.dev import get_here, run_command -from datadog_checks.dev.http import MockResponse from datadog_checks.dev.kind import kind_run from datadog_checks.dev.kube_port_forward import port_forward @@ -105,4 +105,4 
@@ def mock_http_responses(url, **_params): raise Exception(f"url `{url}` not registered") with open(os.path.join(HERE, "fixtures", fixtures_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/kubevirt_api/tests/test_unit.py b/kubevirt_api/tests/test_unit.py index 1a4b040e86d05..0179b6c93586b 100644 --- a/kubevirt_api/tests/test_unit.py +++ b/kubevirt_api/tests/test_unit.py @@ -17,8 +17,8 @@ pytestmark = [pytest.mark.unit] -def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -94,8 +94,8 @@ def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): aggregator.assert_metrics_using_metadata(get_metadata_metrics()) -def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -115,8 +115,8 @@ def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mock aggregator.assert_metric("kubevirt_api.vm.count", value=0) -def test_check_sends_zero_count_for_vmis(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check_sends_zero_count_for_vmis(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -153,8 +153,8 @@ def test_emits_zero_can_connect_when_service_is_down(dd_run_check, aggregator, i ) -def 
test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) check._setup_kube_client = lambda: None @@ -170,8 +170,8 @@ def test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, inst ) -def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtApiCheck("kubevirt_api", {}, [BAD_METRICS_HOSTNAME_INSTANCE]) check._setup_kube_client = lambda: None @@ -189,8 +189,8 @@ def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, ) -def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregator, instance, mock_http, caplog): + mock_http.get.side_effect = mock_http_responses check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) with pytest.raises( @@ -201,8 +201,8 @@ def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregat assert "Cannot connect to Kubernetes API:" in caplog.text -def test_log_warning_healthz_endpoint_not_provided(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_log_warning_healthz_endpoint_not_provided(dd_run_check, aggregator, instance, mock_http, caplog): + mock_http.get.side_effect = mock_http_responses new_instance = deepcopy(instance) 
new_instance.pop("kubevirt_api_healthz_endpoint") diff --git a/kubevirt_controller/tests/conftest.py b/kubevirt_controller/tests/conftest.py index e0e0174775b60..b2512fba1a7b4 100644 --- a/kubevirt_controller/tests/conftest.py +++ b/kubevirt_controller/tests/conftest.py @@ -7,8 +7,8 @@ import pytest import yaml +from datadog_checks.base.utils.http_testing import MockHTTPResponse # noqa: F401 from datadog_checks.dev import get_here, run_command -from datadog_checks.dev.http import MockResponse from datadog_checks.dev.kind import kind_run from datadog_checks.dev.kube_port_forward import port_forward @@ -101,4 +101,4 @@ def mock_http_responses(url, **_params): raise Exception(f"url `{url}` not registered") with open(os.path.join(HERE, "fixtures", fixtures_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/kubevirt_controller/tests/test_unit.py b/kubevirt_controller/tests/test_unit.py index 0a6bcb988a40c..1b4aa53fe488a 100644 --- a/kubevirt_controller/tests/test_unit.py +++ b/kubevirt_controller/tests/test_unit.py @@ -15,8 +15,8 @@ ] -def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtControllerCheck("kubevirt_controller", {}, [instance]) dd_run_check(check) aggregator.assert_metric( @@ -35,8 +35,8 @@ def test_emits_can_connect_zero_when_service_is_down(dd_run_check, aggregator, i ) -def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtControllerCheck("kubevirt_controller", {}, [instance]) diff --git 
a/kubevirt_handler/tests/conftest.py b/kubevirt_handler/tests/conftest.py index 7bc0bc1bad6d3..294abc17ab1e9 100644 --- a/kubevirt_handler/tests/conftest.py +++ b/kubevirt_handler/tests/conftest.py @@ -6,8 +6,8 @@ import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse # noqa: F401 from datadog_checks.dev import get_here, run_command -from datadog_checks.dev.http import MockResponse from datadog_checks.dev.kind import kind_run from datadog_checks.dev.kube_port_forward import port_forward @@ -89,4 +89,4 @@ def mock_http_responses(url, **_params): raise Exception(f"url `{url}` not registered") with open(os.path.join(HERE, "fixtures", fixtures_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/kubevirt_handler/tests/test_unit.py b/kubevirt_handler/tests/test_unit.py index 01e8e42c4a9ba..28561e3e5a327 100644 --- a/kubevirt_handler/tests/test_unit.py +++ b/kubevirt_handler/tests/test_unit.py @@ -16,8 +16,8 @@ ] -def test_check_collects_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check_collects_metrics(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) @@ -121,8 +121,8 @@ def test_check_collects_metrics(dd_run_check, aggregator, instance, mocker): aggregator.assert_metrics_using_metadata(get_metadata_metrics()) -def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, instance, mock_http, caplog): + mock_http.get.side_effect = mock_http_responses del instance["kubevirt_handler_healthz_endpoint"] check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) 
@@ -133,8 +133,8 @@ def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, ) -def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) aggregator.assert_metric( @@ -156,8 +156,8 @@ def test_emits_can_connect_zero_when_service_is_down(dd_run_check, aggregator, i ) -def test_version_metadata(instance, dd_run_check, datadog_agent, aggregator, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_version_metadata(instance, dd_run_check, datadog_agent, aggregator, mock_http): + mock_http.get.side_effect = mock_http_responses check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) check.check_id = "test:123" dd_run_check(check) diff --git a/mapreduce/tests/conftest.py b/mapreduce/tests/conftest.py index dea2ab20b4951..754b33b47d2a1 100644 --- a/mapreduce/tests/conftest.py +++ b/mapreduce/tests/conftest.py @@ -5,11 +5,10 @@ from copy import deepcopy import pytest -from mock import patch +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import WaitFor -from datadog_checks.dev.http import MockResponse from datadog_checks.mapreduce import MapReduceCheck from .common import ( @@ -21,8 +20,6 @@ MR_JOB_COUNTERS_URL, MR_JOBS_URL, MR_TASKS_URL, - TEST_PASSWORD, - TEST_USERNAME, YARN_APPS_URL_BASE, setup_mapreduce, ) @@ -51,55 +48,41 @@ def instance(): @pytest.fixture -def mocked_request(): - with patch("requests.Session.get", new=requests_get_mock): - yield +def mocked_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield @pytest.fixture -def mocked_auth_request(): - 
with patch("requests.Session.get", new=requests_auth_mock): - yield +def mocked_auth_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield def get_custom_hosts(): return [(host, '127.0.0.1') for host in MOCKED_E2E_HOSTS] -def requests_get_mock(session, *args, **kwargs): - url = args[0] - # The parameter that creates the query params (kwargs) is an unordered dict, - # so the query params can be in any order +def requests_get_mock(url, *args, **kwargs): if url.startswith(YARN_APPS_URL_BASE): query = url[len(YARN_APPS_URL_BASE) :] if query in ["?states=RUNNING&applicationTypes=MAPREDUCE", "?applicationTypes=MAPREDUCE&states=RUNNING"]: - return MockResponse(file_path=os.path.join(HERE, "fixtures", "apps_metrics")) + return MockHTTPResponse(file_path=os.path.join(HERE, "fixtures", "apps_metrics")) else: raise Exception( "Apps URL must have the two query parameters: states=RUNNING and applicationTypes=MAPREDUCE" ) if url == MR_JOBS_URL: - return MockResponse(file_path=os.path.join(HERE, "fixtures", "job_metrics")) + return MockHTTPResponse(file_path=os.path.join(HERE, "fixtures", "job_metrics")) if url == MR_JOB_COUNTERS_URL: - return MockResponse(file_path=os.path.join(HERE, "fixtures", "job_counter_metrics")) + return MockHTTPResponse(file_path=os.path.join(HERE, "fixtures", "job_counter_metrics")) if url == MR_TASKS_URL: - return MockResponse(file_path=os.path.join(HERE, "fixtures", "task_metrics")) + return MockHTTPResponse(file_path=os.path.join(HERE, "fixtures", "task_metrics")) if url == CLUSTER_INFO_URL: - return MockResponse(file_path=os.path.join(HERE, "fixtures", "cluster_info")) + return MockHTTPResponse(file_path=os.path.join(HERE, "fixtures", "cluster_info")) raise Exception("There is no mock request for {}".format(url)) - - -def requests_auth_mock(session, *args, **kwargs): - # Make sure we're passing in authentication - assert 'auth' in kwargs, "Error, missing authentication" - - # Make sure we've got the correct username and 
password - assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" - - # Return mocked request.get(...) - return requests_get_mock(session, *args, **kwargs) diff --git a/marathon/datadog_checks/marathon/marathon.py b/marathon/datadog_checks/marathon/marathon.py index 74c65be1468b3..c783599a23191 100644 --- a/marathon/datadog_checks/marathon/marathon.py +++ b/marathon/datadog_checks/marathon/marathon.py @@ -7,6 +7,7 @@ import requests from datadog_checks.base import AgentCheck +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError class Marathon(AgentCheck): @@ -114,7 +115,7 @@ def get_json(self, url, acs_url, tags=None): self.refresh_acs_token(acs_url, tags) r = self.http.get(url) r.raise_for_status() - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): # If there's a timeout self.service_check( self.SERVICE_CHECK_NAME, diff --git a/marathon/tests/test_unit.py b/marathon/tests/test_unit.py index ef104e6264e90..042ab6f38ccd5 100644 --- a/marathon/tests/test_unit.py +++ b/marathon/tests/test_unit.py @@ -2,8 +2,8 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from copy import deepcopy +from unittest.mock import MagicMock -import mock import pytest from datadog_checks.marathon import Marathon @@ -38,13 +38,13 @@ def test_process_apps_ko(check, aggregator): If the check can't hit the Marathon master Url, no metric should be collected """ - check.get_apps_json = mock.MagicMock(return_value=None) + check.get_apps_json = MagicMock(return_value=None) check.process_apps('url', 'acs_url', [], [], None) assert len(aggregator.metric_names) == 0 def test_process_apps(check, aggregator): - check.get_apps_json = mock.MagicMock( + check.get_apps_json = MagicMock( return_value={ 'apps': [ {'id': '/', 'version': '', 'backoffSeconds': 99}, @@ -107,20 +107,5 @@ def test_config(test_case, init_config, extra_config, expected_http_kwargs): 
instance.update(extra_config) check = Marathon('marathon', init_config, instances=[instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - check.check(instance) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('http://localhost:8080/v2/queue', **http_wargs) + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value diff --git a/mesos_master/datadog_checks/mesos_master/mesos_master.py b/mesos_master/datadog_checks/mesos_master/mesos_master.py index a3bf1d590fe12..a8d3f1873acea 100644 --- a/mesos_master/datadog_checks/mesos_master/mesos_master.py +++ b/mesos_master/datadog_checks/mesos_master/mesos_master.py @@ -13,6 +13,7 @@ from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError class MesosMaster(AgentCheck): @@ -183,7 +184,7 @@ def _make_request(self, url): else: status = AgentCheck.OK msg = "Mesos master instance detected at {} ".format(url) - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): # If there's a timeout msg = "{} seconds timeout when hitting {}".format(self.http.options['timeout'], url) status = AgentCheck.CRITICAL diff --git a/mesos_master/tests/test_check.py b/mesos_master/tests/test_check.py index 4c026dcabc9c2..daff6b4033480 100644 --- a/mesos_master/tests/test_check.py +++ b/mesos_master/tests/test_check.py @@ -1,7 +1,8 @@ # (C) Datadog, Inc. 
2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import mock +from unittest.mock import MagicMock + import pytest import requests @@ -84,28 +85,28 @@ def test_instance_timeout(check, instance): [ ( 'OK case for /state endpoint', - [mock.MagicMock(status_code=200, content='{}')], + [MagicMock(status_code=200, content='{}')], AgentCheck.OK, ['my:tag', 'url:http://hello.com/state'], False, ), ( 'OK case with failing /state due to bad status and fallback on /state.json', - [mock.MagicMock(status_code=500), mock.MagicMock(status_code=200, content='{}')], + [MagicMock(status_code=500), MagicMock(status_code=200, content='{}')], AgentCheck.OK, ['my:tag', 'url:http://hello.com/state.json'], False, ), ( 'OK case with failing /state due to Timeout and fallback on /state.json', - [requests.exceptions.Timeout, mock.MagicMock(status_code=200, content='{}')], + [requests.exceptions.Timeout, MagicMock(status_code=200, content='{}')], AgentCheck.OK, ['my:tag', 'url:http://hello.com/state.json'], False, ), ( 'OK case with failing /state due to Exception and fallback on /state.json', - [Exception, mock.MagicMock(status_code=200, content='{}')], + [Exception, MagicMock(status_code=200, content='{}')], AgentCheck.OK, ['my:tag', 'url:http://hello.com/state.json'], False, @@ -119,7 +120,7 @@ def test_instance_timeout(check, instance): ), ( 'NOK case with failing /state and /state.json with bad status', - [mock.MagicMock(status_code=500), mock.MagicMock(status_code=500)], + [MagicMock(status_code=500), MagicMock(status_code=500)], AgentCheck.CRITICAL, ['my:tag', 'url:http://hello.com/state.json'], True, @@ -127,8 +128,8 @@ def test_instance_timeout(check, instance): ( 'OK case with non-leader master on /state', [ - mock.MagicMock(status_code=401, history=[mock.MagicMock(status_code=307)]), - mock.MagicMock(content='{}', history=[], status_code=500), + MagicMock(status_code=401, history=[MagicMock(status_code=307)]), + MagicMock(content='{}', 
history=[], status_code=500), ], AgentCheck.UNKNOWN, ['my:tag', 'url:http://hello.com/state.json'], @@ -137,8 +138,8 @@ def test_instance_timeout(check, instance): ( 'OK case with non-leader master on /state.json', [ - mock.MagicMock(status_code=500, history=[]), - mock.MagicMock(content='{}', history=[mock.MagicMock(status_code=307)], status_code=401), + MagicMock(status_code=500, history=[]), + MagicMock(content='{}', history=[MagicMock(status_code=307)], status_code=401), ], AgentCheck.UNKNOWN, ['my:tag', 'url:http://hello.com/state.json'], @@ -148,20 +149,25 @@ def test_instance_timeout(check, instance): ) @pytest.mark.integration def test_can_connect_service_check( - instance, aggregator, test_case_name, request_mock_side_effects, expected_status, expected_tags, expect_exception + instance, + aggregator, + mock_http, + test_case_name, + request_mock_side_effects, + expected_status, + expected_tags, + expect_exception, ): check = MesosMaster('mesos_master', {}, [instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.side_effect = request_mock_side_effects + mock_http.get.side_effect = request_mock_side_effects - try: - check._get_master_state('http://hello.com', ['my:tag']) - exception_raised = False - except CheckException: - exception_raised = True + try: + check._get_master_state('http://hello.com', ['my:tag']) + exception_raised = False + except CheckException: + exception_raised = True - assert expect_exception == exception_raised + assert expect_exception == exception_raised aggregator.assert_service_check('mesos_master.can_connect', count=1, status=expected_status, tags=expected_tags) diff --git a/mesos_slave/tests/test_unit.py b/mesos_slave/tests/test_unit.py index 098e3683313cf..e04738108caed 100644 --- a/mesos_slave/tests/test_unit.py +++ b/mesos_slave/tests/test_unit.py @@ -177,37 +177,40 @@ def test_config(check, instance, test_case, extra_config, expected_http_kwargs): 
@pytest.mark.parametrize(PARAMETERS, state_test_data) @pytest.mark.integration def test_can_connect_service_check_state( - instance, aggregator, test_case_name, request_mock_effects, expected_tags, expect_exception, expected_status + instance, + aggregator, + mock_http, + test_case_name, + request_mock_effects, + expected_tags, + expect_exception, + expected_status, ): check = MesosSlave('mesos_slave', {}, [instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.side_effect = request_mock_effects - try: - check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) - assert not expect_exception - except Exception: - if not expect_exception: - raise + mock_http.get.side_effect = request_mock_effects + try: + check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) + assert not expect_exception + except Exception: + if not expect_exception: + raise aggregator.assert_service_check('mesos_slave.can_connect', count=1, status=expected_status, tags=expected_tags) @pytest.mark.integration -def test_can_connect_service_with_instance_cluster_name(instance, aggregator): +def test_can_connect_service_with_instance_cluster_name(instance, aggregator, mock_http): instance['cluster_name'] = 'test-cluster' expected_tags = ['url:http://hello.com/state'] + cluster_name_tag + additional_tags expected_status = AgentCheck.OK check = MesosSlave('mesos_slave', {}, [instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.side_effect = [mock.MagicMock(status_code=200, content='{}')] - try: - check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) - assert not False - except Exception: - if not False: - raise + mock_http.get.side_effect = [mock.MagicMock(status_code=200, content='{}')] + try: + check._process_state_info('http://hello.com', instance['tasks'], 
5050, instance['tags']) + assert not False + except Exception: + if not False: + raise aggregator.assert_service_check('mesos_slave.can_connect', count=1, status=expected_status, tags=expected_tags) @@ -215,17 +218,22 @@ def test_can_connect_service_with_instance_cluster_name(instance, aggregator): @pytest.mark.parametrize(PARAMETERS, stats_test_data) @pytest.mark.integration def test_can_connect_service_check_stats( - instance, aggregator, test_case_name, request_mock_effects, expected_tags, expect_exception, expected_status + instance, + aggregator, + mock_http, + test_case_name, + request_mock_effects, + expected_tags, + expect_exception, + expected_status, ): check = MesosSlave('mesos_slave', {}, [instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.side_effect = request_mock_effects - try: - check._process_stats_info('http://hello.com', instance['tags']) - assert not expect_exception - except Exception: - if not expect_exception: - raise + mock_http.get.side_effect = request_mock_effects + try: + check._process_stats_info('http://hello.com', instance['tags']) + assert not expect_exception + except Exception: + if not expect_exception: + raise aggregator.assert_service_check('mesos_slave.can_connect', count=1, status=expected_status, tags=expected_tags) diff --git a/nginx/tests/common.py b/nginx/tests/common.py index 023d7d1c81b08..bcb25ffd0b747 100644 --- a/nginx/tests/common.py +++ b/nginx/tests/common.py @@ -3,8 +3,8 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import get_docker_hostname -from datadog_checks.dev.http import MockResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.nginx.metrics import COUNT_METRICS, METRICS_SEND_AS_COUNT, METRICS_SEND_AS_HISTOGRAM @@ -115,4 +115,4 @@ def mock_http_responses(url, **_params): raise 
Exception("url `{url}` not registered".format(url=url)) with open(os.path.join(HERE, 'fixtures', metrics_file)) as f: - return MockResponse(content=f.read(), headers={"content-type": "application/json"}) + return MockHTTPResponse(content=f.read(), headers={"content-type": "application/json"}) diff --git a/nginx/tests/test_unit.py b/nginx/tests/test_unit.py index 66de9c5dfd748..655c41583eb63 100644 --- a/nginx/tests/test_unit.py +++ b/nginx/tests/test_unit.py @@ -68,34 +68,16 @@ def test_config(check, instance, test_case, extra_config, expected_http_kwargs): c = check(instance) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200, content=b'{}') + for key, value in expected_http_kwargs.items(): + assert c.http.options[key] == value - c.check(instance) - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - - r.get.assert_called_with('http://localhost:8080/nginx_status', **http_wargs) - - -def test_no_version(check, instance, caplog): +def test_no_version(check, instance, caplog, mock_http): c = check(instance) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200, content=b'{}', headers={'server': 'nginx'}) + mock_http.get.return_value = mock.MagicMock(status_code=200, content=b'{}', headers={'server': 'nginx'}) - c.check(instance) + c.check(instance) errors = [record for record in caplog.records if record.levelname == "ERROR"] assert not errors diff --git a/nutanix/tests/conftest.py b/nutanix/tests/conftest.py index 9dbee745235ec..b1911496c2945 100644 --- a/nutanix/tests/conftest.py +++ b/nutanix/tests/conftest.py @@ -8,6 +8,7 @@ import pytest +from 
datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run, get_docker_hostname, get_here from datadog_checks.dev.conditions import CheckEndpoints @@ -121,12 +122,8 @@ def mock_instance(): @pytest.fixture -def mock_http_get(mocker): +def mock_http_get(mock_http): def mock_response(url, params=None, *args, **kwargs): - mock_resp = mocker.Mock() - mock_resp.status_code = 200 - mock_resp.raise_for_status = mocker.Mock() - page = None if params: @@ -144,62 +141,43 @@ def mock_response(url, params=None, *args, **kwargs): page = 0 if '/console' in url: - return mock_resp + return MockHTTPResponse() if ( "/api/clustermgmt/v4.0/stats/clusters/00064715-c043-5d8f-ee4b-176ec875554d/hosts/d8787814-4fe8-4ba5-931f-e1ee31c294a6" in url ): - response_data = load_fixture("host_stats_00064715_d8787814.json") - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture("host_stats_00064715_d8787814.json")) if ( "/api/clustermgmt/v4.0/stats/clusters/aabbccdd-1111-2222-3333-444455556666/hosts/eeee1111-2222-3333-4444-555566667777" in url ): - response_data = load_fixture("host_stats_aabbccdd_eeee1111.json") - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture("host_stats_aabbccdd_eeee1111.json")) if "/api/clustermgmt/v4.0/stats/clusters/00064715-c043-5d8f-ee4b-176ec875554d" in url: - response_data = load_fixture("cluster_stats_00064715.json") - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture("cluster_stats_00064715.json")) if "/api/clustermgmt/v4.0/stats/clusters/aabbccdd-1111-2222-3333-444455556666" in url: - response_data = load_fixture("cluster_stats_aabbccdd.json") - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture("cluster_stats_aabbccdd.json")) if 
'/api/clustermgmt/v4.0/config/clusters/d07db284-6df6-4ca2-88cd-9dd5ed71ac08/hosts' in url: - mock_resp.status_code = 400 - return mock_resp + return MockHTTPResponse(status_code=400) if '/api/clustermgmt/v4.0/config/clusters/00064715-c043-5d8f-ee4b-176ec875554d/hosts' in url: - response_data = load_fixture_page("hosts_00064715.json", page) - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture_page("hosts_00064715.json", page)) if '/api/clustermgmt/v4.0/config/clusters/aabbccdd-1111-2222-3333-444455556666/hosts' in url: - response_data = load_fixture_page("hosts_aabbccdd.json", page) - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture_page("hosts_aabbccdd.json", page)) if '/api/clustermgmt/v4.0/config/clusters' in url: - response_data = load_fixture_page("clusters.json", page) - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture_page("clusters.json", page)) if '/api/prism/v4.0/config/categories' in url: - response_data = load_fixture_page("categories.json", page) - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture_page("categories.json", page)) if 'api/vmm/v4.0/ahv/stats/vms' in url: - response_data = load_fixture_page("vms_stats.json", page) - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=load_fixture_page("vms_stats.json", page)) if 'api/vmm/v4.0/ahv/config/vms' in url: response_data = load_fixture_page("vms.json", page) @@ -218,8 +196,7 @@ def mock_response(url, params=None, *args, **kwargs): response_data = dict(response_data) response_data['data'] = filtered - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=response_data) # Events endpoint - 
paginated if 'api/monitoring/v4.0/serviceability/events' in url: @@ -247,8 +224,7 @@ def mock_response(url, params=None, *args, **kwargs): response_data = dict(response_data) response_data['data'] = filtered_data - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=response_data) if 'api/monitoring/v4.0/serviceability/audits' in url: response_data = load_fixture_page("audits.json", page) @@ -275,8 +251,7 @@ def mock_response(url, params=None, *args, **kwargs): response_data = dict(response_data) response_data['data'] = filtered_data - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=response_data) # Individual alert fetch by ID (e.g. /alerts/{uuid}) import re @@ -287,11 +262,8 @@ def mock_response(url, params=None, *args, **kwargs): all_alerts = load_fixture_page("alerts.json", 0).get('data', []) alert_data = next((a for a in all_alerts if a.get('extId') == alert_ext_id), None) if alert_data: - mock_resp.json = mocker.Mock(return_value={"data": alert_data}) - else: - mock_resp.status_code = 404 - mock_resp.raise_for_status = mocker.Mock(side_effect=Exception("404 Not Found")) - return mock_resp + return MockHTTPResponse(json_data={"data": alert_data}) + return MockHTTPResponse(status_code=404) if 'api/monitoring/v4.0/serviceability/alerts' in url or 'api/monitoring/v4.2/serviceability/alerts' in url: response_data = load_fixture_page("alerts.json", page) @@ -318,8 +290,8 @@ def mock_response(url, params=None, *args, **kwargs): response_data = dict(response_data) response_data['data'] = filtered_data - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=response_data) + if 'api/prism/v4.0/config/tasks' in url: response_data = load_fixture_page("tasks.json", page) @@ -345,12 +317,10 @@ def mock_response(url, params=None, *args, **kwargs): response_data = dict(response_data) 
response_data['data'] = filtered_data - mock_resp.json = mocker.Mock(return_value=response_data) - return mock_resp + return MockHTTPResponse(json_data=response_data) print(f"[MOCK ERROR] No matching endpoint for URL: {url}") - mock_resp.status_code = 404 - mock_resp.raise_for_status = mocker.Mock(side_effect=Exception("404 Not Found")) - return mock_resp + return MockHTTPResponse(status_code=404) - return mocker.patch('requests.Session.get', side_effect=mock_response) + mock_http.get.side_effect = mock_response + return mock_http.get diff --git a/nutanix/tests/test_retry.py b/nutanix/tests/test_retry.py index c9aed0a766c1e..369000ed40951 100644 --- a/nutanix/tests/test_retry.py +++ b/nutanix/tests/test_retry.py @@ -3,11 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) -from unittest.mock import Mock - import pytest -from requests import HTTPError, Response +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.nutanix import NutanixCheck pytestmark = [pytest.mark.unit] @@ -18,18 +17,14 @@ def test_retry_on_rate_limit_success_no_retry(dd_run_check, aggregator, mock_ins check = NutanixCheck('nutanix', {}, [mock_instance]) dd_run_check(check) - mock_response = Mock(spec=Response) - mock_response.status_code = 200 - mock_response.json.return_value = {"data": {"test": "data"}} - mock_response.raise_for_status = Mock() - mock_response.content = b'{"data": {"test": "data"}}' - - mock_get = mocker.patch('requests.Session.get', return_value=mock_response) + mock_http_get.reset_mock() + mock_http_get.side_effect = None + mock_http_get.return_value = MockHTTPResponse(json_data={"data": {"test": "data"}}) result = check._get_request_data("api/test") assert result == {"test": "data"} - assert mock_get.call_count == 1 + assert mock_http_get.call_count == 1 aggregator.assert_metric("nutanix.api.rate_limited", count=0) @@ -38,26 +33,17 @@ def 
test_retry_on_rate_limit_429_then_success(dd_run_check, aggregator, mock_ins check = NutanixCheck('nutanix', {}, [mock_instance]) dd_run_check(check) - # First response: 429 rate limited - mock_response_429 = Mock(spec=Response) - mock_response_429.status_code = 429 - mock_response_429.raise_for_status.side_effect = HTTPError(response=mock_response_429) - mock_response_429.content = b'' - - # Second response: success - mock_response_200 = Mock(spec=Response) - mock_response_200.status_code = 200 - mock_response_200.json.return_value = {"data": {"test": "data"}} - mock_response_200.raise_for_status = Mock() - mock_response_200.content = b'{"data": {"test": "data"}}' - - mock_get = mocker.patch('requests.Session.get', side_effect=[mock_response_429, mock_response_200]) + mock_http_get.reset_mock() + mock_http_get.side_effect = [ + MockHTTPResponse(status_code=429), + MockHTTPResponse(json_data={"data": {"test": "data"}}), + ] mock_sleep = mocker.patch('time.sleep') result = check._get_request_data("api/test") assert result == {"test": "data"} - assert mock_get.call_count == 2 + assert mock_http_get.call_count == 2 assert mock_sleep.call_count == 1 # First retry: base * 2^1 + jitter = 2 to 3 @@ -74,19 +60,16 @@ def test_retry_on_rate_limit_max_retries_exceeded(dd_run_check, aggregator, mock check = NutanixCheck('nutanix', {}, [mock_instance]) dd_run_check(check) - mock_response_429 = Mock(spec=Response) - mock_response_429.status_code = 429 - mock_response_429.raise_for_status.side_effect = HTTPError(response=mock_response_429) - mock_response_429.content = b'' - - mock_get = mocker.patch('requests.Session.get', return_value=mock_response_429) + mock_http_get.reset_mock() + mock_http_get.side_effect = None + mock_http_get.return_value = MockHTTPResponse(status_code=429) mock_sleep = mocker.patch('time.sleep') - with pytest.raises(HTTPError): + with pytest.raises(HTTPStatusError): check._get_request_data("api/test") # Initial request + 1 retry (range(1, 2)) = 2 total - 
assert mock_get.call_count == 2 + assert mock_http_get.call_count == 2 assert mock_sleep.call_count == 1 # Sleep between retries (not after final failure) aggregator.assert_metric("nutanix.api.rate_limited", tags=['nutanix', 'prism_central:10.0.0.197']) @@ -98,19 +81,16 @@ def test_retry_on_non_429_error_no_retry(dd_run_check, aggregator, mock_instance dd_run_check(check) # 500 Internal Server Error - mock_response_500 = Mock(spec=Response) - mock_response_500.status_code = 500 - mock_response_500.raise_for_status.side_effect = HTTPError(response=mock_response_500) - mock_response_500.content = b'' - - mock_get = mocker.patch('requests.Session.get', return_value=mock_response_500) + mock_http_get.reset_mock() + mock_http_get.side_effect = None + mock_http_get.return_value = MockHTTPResponse(status_code=500) mock_sleep = mocker.patch('time.sleep') - with pytest.raises(HTTPError): + with pytest.raises(HTTPStatusError): check._get_request_data("api/test") # Should only try once, no retries for non-429 errors - assert mock_get.call_count == 1 + assert mock_http_get.call_count == 1 assert mock_sleep.call_count == 0 aggregator.assert_metric("nutanix.api.rate_limited", count=0) @@ -126,24 +106,17 @@ def test_retry_with_custom_config(dd_run_check, aggregator, mock_instance, mock_ dd_run_check(check) # First two responses: 429, then success - mock_response_429 = Mock(spec=Response) - mock_response_429.status_code = 429 - mock_response_429.raise_for_status.side_effect = HTTPError(response=mock_response_429) - mock_response_429.content = b'' - - mock_response_200 = Mock(spec=Response) - mock_response_200.status_code = 200 - mock_response_200.json.return_value = {"data": {"test": "data"}} - mock_response_200.raise_for_status = Mock() - mock_response_200.content = b'{"data": {"test": "data"}}' - - mock_get = mocker.patch('requests.Session.get', side_effect=[mock_response_429, mock_response_200]) + mock_http_get.reset_mock() + mock_http_get.side_effect = [ + 
MockHTTPResponse(status_code=429), + MockHTTPResponse(json_data={"data": {"test": "data"}}), + ] mock_sleep = mocker.patch('time.sleep') result = check._get_request_data("api/test") assert result == {"test": "data"} - assert mock_get.call_count == 2 + assert mock_http_get.call_count == 2 # First retry: base * 2^1 + jitter = 4 to 5 (base=2) sleep_time = mock_sleep.call_args[0][0] @@ -160,26 +133,19 @@ def test_retry_exponential_backoff(dd_run_check, aggregator, mock_instance, mock dd_run_check(check) # Four 429 responses, then success - mock_response_429 = Mock(spec=Response) - mock_response_429.status_code = 429 - mock_response_429.raise_for_status.side_effect = HTTPError(response=mock_response_429) - mock_response_429.content = b'' - - mock_response_200 = Mock(spec=Response) - mock_response_200.status_code = 200 - mock_response_200.json.return_value = {"data": {"test": "data"}} - mock_response_200.raise_for_status = Mock() - mock_response_200.content = b'{"data": {"test": "data"}}' - - mock_get = mocker.patch( - 'requests.Session.get', side_effect=[mock_response_429, mock_response_429, mock_response_429, mock_response_200] - ) + mock_http_get.reset_mock() + mock_http_get.side_effect = [ + MockHTTPResponse(status_code=429), + MockHTTPResponse(status_code=429), + MockHTTPResponse(status_code=429), + MockHTTPResponse(json_data={"data": {"test": "data"}}), + ] mock_sleep = mocker.patch('time.sleep') result = check._get_request_data("api/test") assert result == {"test": "data"} - assert mock_get.call_count == 4 + assert mock_http_get.call_count == 4 assert mock_sleep.call_count == 3 # Check exponential backoff pattern: base * 2^attempt + jitter (attempt starts at 1) @@ -202,19 +168,16 @@ def test_retry_disabled_with_zero_max_retries(dd_run_check, aggregator, mock_ins check = NutanixCheck('nutanix', {}, [mock_instance]) dd_run_check(check) - mock_response_429 = Mock(spec=Response) - mock_response_429.status_code = 429 - mock_response_429.raise_for_status.side_effect = 
HTTPError(response=mock_response_429) - mock_response_429.content = b'' - - mock_get = mocker.patch('requests.Session.get', return_value=mock_response_429) + mock_http_get.reset_mock() + mock_http_get.side_effect = None + mock_http_get.return_value = MockHTTPResponse(status_code=429) mock_sleep = mocker.patch('time.sleep') - with pytest.raises(HTTPError): + with pytest.raises(HTTPStatusError): check._get_request_data("api/test") # Should only try once when max_retries is 0 - assert mock_get.call_count == 1 + assert mock_http_get.call_count == 1 assert mock_sleep.call_count == 0 # Loop never runs with max_retries=0 (clamped to 1), so no rate_limited metric is emitted @@ -226,25 +189,18 @@ def test_health_check_with_retry(dd_run_check, aggregator, mock_instance, mock_h check = NutanixCheck('nutanix', {}, [mock_instance]) dd_run_check(check) - # First response: 429 rate limited - mock_response_429 = Mock(spec=Response) - mock_response_429.status_code = 429 - mock_response_429.raise_for_status.side_effect = HTTPError(response=mock_response_429) - mock_response_429.content = b'' - - # Second response: success - mock_response_200 = Mock(spec=Response) - mock_response_200.status_code = 200 - mock_response_200.raise_for_status = Mock() - mock_response_200.content = b'' - - mock_get = mocker.patch('requests.Session.get', side_effect=[mock_response_429, mock_response_200]) + # First response: 429 rate limited, second: success + mock_http_get.reset_mock() + mock_http_get.side_effect = [ + MockHTTPResponse(status_code=429), + MockHTTPResponse(), + ] mock_sleep = mocker.patch('time.sleep') result = check._check_health() assert result is True - assert mock_get.call_count == 2 + assert mock_http_get.call_count == 2 assert mock_sleep.call_count == 1 # Health check should report up after successful retry diff --git a/nvidia_nim/tests/test_unit.py b/nvidia_nim/tests/test_unit.py index deaeffedf3ae8..d593cc99a9a1f 100644 --- a/nvidia_nim/tests/test_unit.py +++ 
b/nvidia_nim/tests/test_unit.py @@ -7,7 +7,7 @@ import pytest from datadog_checks.base.constants import ServiceCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.nvidia_nim import NvidiaNIMCheck @@ -20,8 +20,8 @@ def test_check_nvidia_nim(dd_run_check, aggregator, datadog_agent, instance): with mock.patch( 'requests.Session.get', side_effect=[ - MockResponse(file_path=get_fixture_path("nim_metrics.txt")), - MockResponse(file_path=get_fixture_path("nim_version.json")), + MockHTTPResponse(file_path=get_fixture_path("nim_metrics.txt")), + MockHTTPResponse(file_path=get_fixture_path("nim_version.json")), ], ): dd_run_check(check) @@ -54,7 +54,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = NvidiaNIMCheck("nvidia_nim", {}, [instance]) - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match="HTTPStatusError"): dd_run_check(check) aggregator.assert_all_metrics_covered() diff --git a/nvidia_triton/tests/test_unit.py b/nvidia_triton/tests/test_unit.py index 31c6b7e0551d2..e5c8fff456612 100644 --- a/nvidia_triton/tests/test_unit.py +++ b/nvidia_triton/tests/test_unit.py @@ -35,7 +35,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = NvidiaTritonCheck('nvidia_triton', {}, [instance]) - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match="HTTPStatusError"): dd_run_check(check) aggregator.assert_all_metrics_covered() diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 2e9ade362b0e0..ea0a39f870e2d 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ 
b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -11,6 +11,14 @@ from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException from datadog_checks.base.utils.discovery.discovery import Discovery +from datadog_checks.base.utils.http_exceptions import ( + HTTPConnectionError as AgentHTTPConnectionError, +) +from datadog_checks.base.utils.http_exceptions import ( + HTTPInvalidURLError, + HTTPStatusError, + HTTPTimeoutError, +) from datadog_checks.base.utils.time import get_current_datetime, get_timestamp from datadog_checks.octopus_deploy.config_models.instance import ProjectGroups, Projects @@ -74,7 +82,16 @@ def _process_endpoint(self, endpoint, params=None, report_service_check=False): if report_service_check: self.gauge('api.can_connect', 1, tags=self._base_tags) return response.json() - except (Timeout, HTTPError, InvalidURL, ConnectionError) as e: + except ( + Timeout, + HTTPError, + InvalidURL, + ConnectionError, + HTTPTimeoutError, + HTTPStatusError, + HTTPInvalidURLError, + AgentHTTPConnectionError, + ) as e: if report_service_check: self.gauge('api.can_connect', 0, tags=self._base_tags) raise CheckException( diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 7ec3897c84a13..acb3c2c45d7db 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -5,16 +5,16 @@ import json import os from pathlib import Path +from unittest.mock import MagicMock from urllib.parse import urlparse -import mock import pytest -import requests +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints from datadog_checks.dev.fs import get_here -from datadog_checks.dev.http import MockResponse from .constants import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_OCTOPUS_LAB @@ -88,8 +88,9 
@@ } -# https://docs.python.org/3/library/unittest.mock-examples.html#coping-with-mutable-arguments -class CopyingMock(mock.MagicMock): +class _CopyingMock(MagicMock): + """Deep-copy args at record time so mutable params aren't modified after the call.""" + def __call__(self, /, *args, **kwargs): args = copy.deepcopy(args) kwargs = copy.deepcopy(kwargs) @@ -181,38 +182,31 @@ def call(method, url, file='response', headers=None, params=None): response = mock_responses(method, url, file=file, headers=headers, params=params) if response is not None: return response - http_response = requests.models.Response() - http_response.status_code = 404 - http_response.reason = "Not Found" - http_response.url = url - raise requests.exceptions.HTTPError(response=http_response) + raise HTTPStatusError('404 Client Error', response=MockHTTPResponse(status_code=404, url=url)) yield call @pytest.fixture -def mock_http_get(request, monkeypatch, mock_http_call): +def mock_http_get(request, mock_http, mock_http_call): param = request.param if hasattr(request, 'param') and request.param is not None else {} http_error = param.pop('http_error', {}) data = param.pop('mock_data', {}) elapsed_total_seconds = param.pop('elapsed_total_seconds', {}) def get(url, *args, **kwargs): - args = copy.deepcopy(args) kwargs = copy.deepcopy(kwargs) method = 'GET' url = get_url_path(url) if http_error and url in http_error: return http_error[url] if data and url in data: - return MockResponse(json_data=data[url], status_code=200) + return MockHTTPResponse(json_data=data[url]) headers = kwargs.get('headers') params = kwargs.get('params') - mock_elapsed = mock.MagicMock(total_seconds=mock.MagicMock(return_value=elapsed_total_seconds.get(url, 0.0))) - mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) - mock_status_code = mock.MagicMock(return_value=200) - return CopyingMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) - - mock_get = 
CopyingMock(side_effect=get) - monkeypatch.setattr('requests.Session.get', mock_get) - return mock_get + json_data = mock_http_call(method, url, headers=headers, params=params) + return MockHTTPResponse(json_data=json_data, elapsed_seconds=elapsed_total_seconds.get(url, 0.0)) + + copying_mock = _CopyingMock(side_effect=get) + mock_http.get = copying_mock + return copying_mock diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 5cb5cd76c3002..e93af9c65c74b 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -9,7 +9,7 @@ import mock import pytest -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck @@ -33,7 +33,7 @@ pytest.param( { 'http_error': { - '/api/spaces': MockResponse(status_code=500), + '/api/spaces': MockHTTPResponse(status_code=500), } }, pytest.raises(Exception, match=r'Could not connect to octopus API.*'), @@ -1038,10 +1038,10 @@ def test_empty_include(get_current_datetime, dd_run_check, aggregator): pytest.param( { 'http_error': { - '/api/Spaces-1/tasks': MockResponse(status_code=500), + '/api/Spaces-1/tasks': MockHTTPResponse(status_code=500), } }, - 'Failed to access endpoint: api/Spaces-1/tasks: 500 Server Error: None for url: None', + 'Failed to access endpoint: api/Spaces-1/tasks: 500 Server Error', id='http error', ), ], @@ -1101,10 +1101,10 @@ def test_server_node_metrics(get_current_datetime, dd_run_check, aggregator, ins pytest.param( { 'http_error': { - '/api/octopusservernodes': MockResponse(status_code=500), + '/api/octopusservernodes': MockHTTPResponse(status_code=500), } }, - 'Failed to access endpoint: api/octopusservernodes: 500 Server Error: None for url: None', + 'Failed to access endpoint: api/octopusservernodes: 500 Server Error', id='http error', ), ], @@ -1545,10 +1545,10 @@ 
def test_environments_discovery_include_invalid(get_current_datetime, dd_run_che pytest.param( { 'http_error': { - '/api/Spaces-1/environments': MockResponse(status_code=500), + '/api/Spaces-1/environments': MockHTTPResponse(status_code=500), } }, - 'Failed to access endpoint: api/Spaces-1/environments: 500 Server Error: None for url: None', + 'Failed to access endpoint: api/Spaces-1/environments: 500 Server Error', id='http error', ), ], @@ -1651,10 +1651,10 @@ def test_environments_metrics_http_failure( pytest.param( { 'http_error': { - '/api/Spaces-1/releases/Releases-3': MockResponse(status_code=500), + '/api/Spaces-1/releases/Releases-3': MockHTTPResponse(status_code=500), } }, - 'Failed to access endpoint: api/Spaces-1/releases/Releases-3: 500 Server Error: None for url: None', + 'Failed to access endpoint: api/Spaces-1/releases/Releases-3: 500 Server Error', id='http error', ), ], @@ -1879,10 +1879,10 @@ def test_deployment_metrics_releases_http_failure( pytest.param( { 'http_error': { - '/api/Spaces-1/deployments/Deployments-18': MockResponse(status_code=500), + '/api/Spaces-1/deployments/Deployments-18': MockHTTPResponse(status_code=500), } }, - 'Failed to access endpoint: api/Spaces-1/deployments/Deployments-18: 500 Server Error: None for url: None', + 'Failed to access endpoint: api/Spaces-1/deployments/Deployments-18: 500 Server Error', id='http error', ), ], @@ -2111,10 +2111,10 @@ def test_deployment_metrics_deployments_http_failure( pytest.param( { 'http_error': { - '/api/Spaces-1/environments': MockResponse(status_code=500), + '/api/Spaces-1/environments': MockHTTPResponse(status_code=500), } }, - 'Failed to access endpoint: api/Spaces-1/environments: 500 Server Error: None for url: None', + 'Failed to access endpoint: api/Spaces-1/environments: 500 Server Error', id='http error', ), ], diff --git a/openmetrics/tests/test_openmetrics.py b/openmetrics/tests/test_openmetrics.py index f0212951b1bde..b55f8693f7711 100644 --- 
a/openmetrics/tests/test_openmetrics.py +++ b/openmetrics/tests/test_openmetrics.py @@ -33,12 +33,9 @@ @pytest.mark.parametrize('poll_mock_fixture', ['prometheus_poll_mock', 'openmetrics_poll_mock']) def test_openmetrics(aggregator, dd_run_check, request, poll_mock_fixture): - from datadog_checks.base.checks.openmetrics.v2.scraper import OpenMetricsScraper - request.getfixturevalue(poll_mock_fixture) check = OpenMetricsCheck('openmetrics', {}, [instance_new]) - scraper = OpenMetricsScraper(check, instance_new) dd_run_check(check) aggregator.assert_metric( @@ -63,19 +60,15 @@ def test_openmetrics(aggregator, dd_run_check, request, poll_mock_fixture): ) aggregator.assert_all_metrics_covered() - assert check.http.options['headers']['Accept'] == '*/*' - assert scraper.http.options['headers']['Accept'] == 'text/plain' + assert check.http.get_header('Accept') == '*/*' def test_openmetrics_use_latest_spec(aggregator, dd_run_check, mock_http_response, openmetrics_payload, caplog): - from datadog_checks.base.checks.openmetrics.v2.scraper import OpenMetricsScraper - # We want to make sure that when `use_latest_spec` is enabled, we use the OpenMetrics parser # even when the response's `Content-Type` doesn't declare the appropriate media type. 
- mock_http_response(openmetrics_payload, normalize_content=False) + get_mock = mock_http_response(openmetrics_payload, normalize_content=False) check = OpenMetricsCheck('openmetrics', {}, [instance_new_strict]) - scraper = OpenMetricsScraper(check, instance_new_strict) dd_run_check(check) aggregator.assert_metric( @@ -95,9 +88,9 @@ def test_openmetrics_use_latest_spec(aggregator, dd_run_check, mock_http_respons ) aggregator.assert_all_metrics_covered() - assert check.http.options['headers']['Accept'] == '*/*' + assert check.http.get_header('Accept') == '*/*' assert caplog.text == '' - assert scraper.http.options['headers']['Accept'] == ( + assert get_mock.call_args.kwargs['headers']['Accept'] == ( 'application/openmetrics-text;version=1.0.0,application/openmetrics-text;version=0.0.1' ) diff --git a/openstack/datadog_checks/openstack/openstack.py b/openstack/datadog_checks/openstack/openstack.py index 2ed71df4ccf00..6bd505692950c 100644 --- a/openstack/datadog_checks/openstack/openstack.py +++ b/openstack/datadog_checks/openstack/openstack.py @@ -14,6 +14,7 @@ import simplejson as json from datadog_checks.base import AgentCheck, is_affirmative +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError SOURCE_TYPE = 'openstack' @@ -218,7 +219,12 @@ def get_auth_response_from_config(cls, init_config, instance_config, proxy_confi exception_msg = None try: auth_resp = cls.request_auth_token(auth_scope, identity, keystone_server_url, ssl_verify, proxy_config) - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError): + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ): exception_msg = "Failed keystone auth with user:{user} domain:{domain} scope:{scope} @{url}".format( user=identity['password']['user']['name'], domain=identity['password']['user']['domain']['id'], @@ -240,6 +246,7 @@ def get_auth_response_from_config(cls, 
init_config, instance_config, proxy_confi requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError, + HTTPTimeoutError, ) as e: exception_msg = "{msg} and also failed keystone auth with \ identity:{user} domain:{domain} scope:{scope} @{url}: {ex}".format( @@ -274,7 +281,12 @@ def from_config(cls, init_config, instance_config, proxy_config=None): try: project_resp = cls.request_project_list(auth_token, keystone_server_url, ssl_verify, proxy_config) projects = project_resp.json().get('projects') - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e: + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ) as e: exception_msg = "unable to retrieve project list from keystone auth with identity: @{url}: {ex}".format( url=keystone_server_url, ex=e ) @@ -292,6 +304,7 @@ def from_config(cls, init_config, instance_config, proxy_config=None): requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError, + HTTPTimeoutError, ) as e: exception_msg = "unable to retrieve project from keystone auth with identity: @{url}: {ex}".format( url=keystone_server_url, ex=e @@ -1061,7 +1074,12 @@ def _send_api_service_checks(self, scope, tags): AgentCheck.OK, tags=["keystone_server:%s" % self.init_config.get("keystone_server_url")] + tags, ) - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError): + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ): self.service_check( self.COMPUTE_API_SC, AgentCheck.CRITICAL, @@ -1082,7 +1100,12 @@ def _send_api_service_checks(self, scope, tags): AgentCheck.OK, tags=["keystone_server:%s" % self.init_config.get("keystone_server_url")] + tags, ) - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, 
requests.exceptions.ConnectionError): + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ): self.service_check( self.NETWORK_API_SC, AgentCheck.CRITICAL, @@ -1286,7 +1309,7 @@ def check(self, instance): self.warning("Error reaching nova API") return - except (requests.exceptions.Timeout, requests.exceptions.ConnectionError): + except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, HTTPTimeoutError): # exponential backoff self.do_backoff(instance) self.warning("There were some problems reaching the nova API - applying exponential backoff") diff --git a/openstack_controller/datadog_checks/openstack_controller/components/component.py b/openstack_controller/datadog_checks/openstack_controller/components/component.py index c848c48a3506b..077d5b7fa9bb1 100644 --- a/openstack_controller/datadog_checks/openstack_controller/components/component.py +++ b/openstack_controller/datadog_checks/openstack_controller/components/component.py @@ -9,6 +9,7 @@ import requests from datadog_checks.base import AgentCheck +from datadog_checks.base.utils.http_exceptions import HTTPRequestError, HTTPStatusError from datadog_checks.openstack_controller.api.catalog import CatalogEndPointFailure @@ -76,7 +77,7 @@ def wrapper(self, *args, **kwargs): tags = argument_value('tags', func, *args, **kwargs) self.check.service_check(self.SERVICE_CHECK, AgentCheck.OK, tags=tags) return result if result is not None else True - except requests.exceptions.RequestException as e: + except (requests.exceptions.RequestException, HTTPRequestError, HTTPStatusError) as e: self.check.log.debug( "Encountered a RequestException in '%s:%s' [%s]: %s", self.__class__.__name__, diff --git a/openstack_controller/datadog_checks/openstack_controller/legacy/api.py b/openstack_controller/datadog_checks/openstack_controller/legacy/api.py index d2e6ead5f70ee..ab35258b904bc 100644 --- 
a/openstack_controller/datadog_checks/openstack_controller/legacy/api.py +++ b/openstack_controller/datadog_checks/openstack_controller/legacy/api.py @@ -9,6 +9,8 @@ import requests from openstack import connection +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError + from .exceptions import ( AuthenticationNeeded, IncompleteIdentity, @@ -500,7 +502,12 @@ def _post_auth_token(logger, keystone_endpoint, identity, requests_wrapper, scop logger.debug("url: %s || response: %s", auth_url, resp.json()) return resp - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError): + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ): safe_identity = copy.deepcopy(identity) safe_identity['password']['user']['password'] = '********' msg = "Failed Keystone auth with identity:{identity} scope:{scope} @{url}".format( @@ -520,7 +527,12 @@ def _get_auth_projects(logger, keystone_endpoint, requests_wrapper): logger.debug("url: %s || response: %s", auth_url, jresp) projects = jresp.get('projects') return projects - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e: + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ) as e: msg = "unable to retrieve project list from Keystone auth with identity: @{url}: {ex}".format( url=auth_url, ex=e ) diff --git a/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py b/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py index c94f7bb0027d5..268f89466f78f 100644 --- a/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py +++ b/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py @@ -13,6 +13,7 @@ 
from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.common import pattern_filter +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from .api import ApiFactory from .exceptions import ( @@ -764,7 +765,12 @@ def check(self, instance): except AuthenticationNeeded: # Delete the scope, we'll populate a new one on the next run for this instance self.delete_api_cache() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e: + except ( + requests.exceptions.HTTPError, + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + HTTPTimeoutError, + ) as e: if isinstance(e, requests.exceptions.HTTPError) and e.response.status_code < 500: self.warning("Error reaching Nova API: %s", e) else: diff --git a/openstack_controller/tests/conftest.py b/openstack_controller/tests/conftest.py index e6b933bb66871..f4ee07975972d 100644 --- a/openstack_controller/tests/conftest.py +++ b/openstack_controller/tests/conftest.py @@ -14,10 +14,10 @@ import yaml import tests.configs as configs +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs from datadog_checks.dev.fs import get_here -from datadog_checks.dev.http import MockResponse from datadog_checks.openstack_controller import OpenStackControllerCheck from .endpoints import IRONIC_ENDPOINTS, NOVA_ENDPOINTS @@ -1000,7 +1000,7 @@ def get(url, *args, **kwargs): if http_error and url in http_error: return http_error[url] if data and url in data: - return MockResponse(json_data=data[url], status_code=200) + return MockHTTPResponse(json_data=data[url], status_code=200) headers = kwargs.get('headers') params = kwargs.get('params') mock_elapsed = mock.MagicMock(total_seconds=mock.MagicMock(return_value=elapsed_total_seconds.get(url, 0.0))) @@ -1024,6 +1024,7 @@ def post(url, *args, **kwargs): url = 
get_url_path(url) if http_error and url in http_error: return http_error[url] + headers = None if url == '/identity/v3/auth/tokens': data = kwargs['json'] file = data.get('auth', {}).get('scope', 'unscoped') @@ -1038,7 +1039,7 @@ def post(url, *args, **kwargs): json_data = mock_http_call(method, url) if replace and url in replace: json_data = replace[url](json_data) - return MockResponse(json_data=json_data, status_code=200, headers=headers) + return MockHTTPResponse(json_data=json_data, status_code=200, headers=headers) mock_post = mock.MagicMock(side_effect=post) monkeypatch.setattr('requests.Session.post', mock_post) diff --git a/openstack_controller/tests/test_unit_auth.py b/openstack_controller/tests/test_unit_auth.py index 24b8e74fe9492..9725dc2c06060 100644 --- a/openstack_controller/tests/test_unit_auth.py +++ b/openstack_controller/tests/test_unit_auth.py @@ -8,7 +8,7 @@ import pytest import tests.configs as configs -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse pytestmark = [ pytest.mark.unit, @@ -20,14 +20,14 @@ ('mock_http_post', 'connection_authorize', 'instance'), [ pytest.param( - {'http_error': {'/identity/v3/auth/tokens': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/auth/tokens': MockHTTPResponse(status_code=500)}}, None, configs.REST, id='api rest', ), pytest.param( None, - {'http_error': MockResponse(status_code=500)}, + {'http_error': MockHTTPResponse(status_code=500)}, configs.SDK, id='api sdk', ), diff --git a/openstack_controller/tests/test_unit_cinder.py b/openstack_controller/tests/test_unit_cinder.py index 994d2f629acc2..be4c728926220 100644 --- a/openstack_controller/tests/test_unit_cinder.py +++ b/openstack_controller/tests/test_unit_cinder.py @@ -10,7 +10,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import 
MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog @@ -148,12 +148,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/volume/v3/': MockResponse(status_code=500)}}, + {'http_error': {'/volume/v3/': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/volume/v3/': MockResponse(status_code=500)}}, + {'http_error': {'/volume/v3/': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), diff --git a/openstack_controller/tests/test_unit_glance.py b/openstack_controller/tests/test_unit_glance.py index 20b6235afce3e..6157af5b0c2de 100644 --- a/openstack_controller/tests/test_unit_glance.py +++ b/openstack_controller/tests/test_unit_glance.py @@ -11,7 +11,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog from tests.metrics import ( @@ -133,12 +133,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/image': MockResponse(status_code=500)}}, + {'http_error': {'/image': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/image': MockResponse(status_code=500)}}, + {'http_error': {'/image': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -214,7 +214,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ('mock_http_get', 'connection_image', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/image/v2/images': MockResponse(status_code=500)}}, + {'http_error': 
{'/image/v2/images': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -222,7 +222,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ), pytest.param( None, - {'http_error': {'images': MockResponse(status_code=500)}}, + {'http_error': {'images': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', diff --git a/openstack_controller/tests/test_unit_heat.py b/openstack_controller/tests/test_unit_heat.py index cef6d072d5d72..2516cfd80d7fd 100644 --- a/openstack_controller/tests/test_unit_heat.py +++ b/openstack_controller/tests/test_unit_heat.py @@ -11,7 +11,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog from tests.metrics import ( @@ -104,12 +104,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/heat-api': MockResponse(status_code=500)}}, + {'http_error': {'/heat-api': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/heat-api': MockResponse(status_code=500)}}, + {'http_error': {'/heat-api': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -187,8 +187,8 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): pytest.param( { 'http_error': { - '/heat-api/v1/1e6e233e637d4d55a50a62b63398ad15/stacks': MockResponse(status_code=500), - '/heat-api/v1/6e39099cccde4f809b003d9e0dd09304/stacks': MockResponse(status_code=500), + '/heat-api/v1/1e6e233e637d4d55a50a62b63398ad15/stacks': MockHTTPResponse(status_code=500), + '/heat-api/v1/6e39099cccde4f809b003d9e0dd09304/stacks': MockHTTPResponse(status_code=500), } }, None, @@ -201,8 +201,8 @@ def 
test_response_time(aggregator, check, dd_run_check, mock_http_get): { 'http_error': { 'stacks': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, diff --git a/openstack_controller/tests/test_unit_ironic.py b/openstack_controller/tests/test_unit_ironic.py index 013ce3230658a..34340fe5cb4c1 100644 --- a/openstack_controller/tests/test_unit_ironic.py +++ b/openstack_controller/tests/test_unit_ironic.py @@ -11,7 +11,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog from tests.metrics import ( @@ -677,12 +677,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/baremetal': MockResponse(status_code=500)}}, + {'http_error': {'/baremetal': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/baremetal': MockResponse(status_code=500)}}, + {'http_error': {'/baremetal': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -758,7 +758,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ('mock_http_get', 'connection_baremetal', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/baremetal/v1/nodes/detail': MockResponse(status_code=500)}}, + {'http_error': {'/baremetal/v1/nodes/detail': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -766,7 +766,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ), pytest.param( None, - 
{'http_error': {'nodes': MockResponse(status_code=500)}}, + {'http_error': {'nodes': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1220,7 +1220,7 @@ def test_pagination_invalid_no_exception(aggregator, openstack_controller_check, ('mock_http_get', 'connection_baremetal', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/baremetal/v1/conductors': MockResponse(status_code=500)}}, + {'http_error': {'/baremetal/v1/conductors': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1228,7 +1228,7 @@ def test_pagination_invalid_no_exception(aggregator, openstack_controller_check, ), pytest.param( None, - {'http_error': {'conductors': MockResponse(status_code=500)}}, + {'http_error': {'conductors': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', diff --git a/openstack_controller/tests/test_unit_keystone.py b/openstack_controller/tests/test_unit_keystone.py index bc6eab0ca4cb3..90978efec1bda 100644 --- a/openstack_controller/tests/test_unit_keystone.py +++ b/openstack_controller/tests/test_unit_keystone.py @@ -11,7 +11,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog @@ -409,12 +409,12 @@ def test_region_id_in_tags(aggregator, dd_run_check, instance, openstack_control ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/identity': MockResponse(status_code=500)}}, + {'http_error': {'/identity': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/identity': MockResponse(status_code=500)}}, + {'http_error': {'/identity': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -500,7 +500,7 @@ def test_response_time(aggregator, check, 
dd_run_check, mock_http_get): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/regions': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/regions': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -508,7 +508,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ), pytest.param( None, - {'http_error': {'regions': MockResponse(status_code=500)}}, + {'http_error': {'regions': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -575,7 +575,7 @@ def test_regions_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/domains': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/domains': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -583,7 +583,7 @@ def test_regions_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'domains': MockResponse(status_code=500)}}, + {'http_error': {'domains': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -670,7 +670,7 @@ def test_domains_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/projects': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/projects': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -678,7 +678,7 @@ def test_domains_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'projects': MockResponse(status_code=500)}}, + {'http_error': {'projects': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -829,7 +829,7 @@ def test_projects_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - 
{'http_error': {'/identity/v3/users': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/users': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -837,7 +837,7 @@ def test_projects_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'users': MockResponse(status_code=500)}}, + {'http_error': {'users': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1024,7 +1024,7 @@ def test_users_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/groups': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/groups': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1032,7 +1032,7 @@ def test_users_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'groups': MockResponse(status_code=500)}}, + {'http_error': {'groups': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1063,7 +1063,7 @@ def test_groups_exception(aggregator, check, dd_run_check, mock_http_get, connec pytest.param( { 'http_error': { - '/identity/v3/groups/89b36a4c32c44b0ea8856b6357f101ea/users': MockResponse(status_code=500) + '/identity/v3/groups/89b36a4c32c44b0ea8856b6357f101ea/users': MockHTTPResponse(status_code=500) } }, None, @@ -1073,7 +1073,7 @@ def test_groups_exception(aggregator, check, dd_run_check, mock_http_get, connec ), pytest.param( None, - {'http_error': {'group_users': {'89b36a4c32c44b0ea8856b6357f101ea': MockResponse(status_code=500)}}}, + {'http_error': {'group_users': {'89b36a4c32c44b0ea8856b6357f101ea': MockHTTPResponse(status_code=500)}}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1199,7 +1199,7 @@ def test_groups_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/services': 
MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/services': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1207,7 +1207,7 @@ def test_groups_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'services': MockResponse(status_code=500)}}, + {'http_error': {'services': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1344,7 +1344,7 @@ def test_services_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/registered_limits': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/registered_limits': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1352,7 +1352,7 @@ def test_services_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'registered_limits': MockResponse(status_code=500)}}, + {'http_error': {'registered_limits': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1388,7 +1388,7 @@ def test_registered_limits_exception(aggregator, check, dd_run_check, mock_http_ ('mock_http_get', 'connection_identity', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/identity/v3/limits': MockResponse(status_code=500)}}, + {'http_error': {'/identity/v3/limits': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1396,7 +1396,7 @@ def test_registered_limits_exception(aggregator, check, dd_run_check, mock_http_ ), pytest.param( None, - {'http_error': {'limits': MockResponse(status_code=500)}}, + {'http_error': {'limits': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', diff --git a/openstack_controller/tests/test_unit_neutron.py b/openstack_controller/tests/test_unit_neutron.py index 21713a956edc6..75afdb10d2340 100644 --- a/openstack_controller/tests/test_unit_neutron.py +++ 
b/openstack_controller/tests/test_unit_neutron.py @@ -11,7 +11,7 @@ import tests.configs as configs import tests.metrics as metrics from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog @@ -181,12 +181,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/networking': MockResponse(status_code=500)}}, + {'http_error': {'/networking': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/networking': MockResponse(status_code=500)}}, + {'http_error': {'/networking': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -262,7 +262,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ('mock_http_get', 'connection_network', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/networking/v2.0/agents': MockResponse(status_code=500)}}, + {'http_error': {'/networking/v2.0/agents': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -270,7 +270,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): ), pytest.param( None, - {'http_error': {'agents': MockResponse(status_code=500)}}, + {'http_error': {'agents': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -488,7 +488,7 @@ def test_disable_quotas_collect_for_all_projects(aggregator, dd_run_check, insta pytest.param( { 'http_error': { - '/networking/v2.0/networks': MockResponse(status_code=500), + '/networking/v2.0/networks': MockHTTPResponse(status_code=500), } }, None, @@ -501,8 +501,8 @@ def test_disable_quotas_collect_for_all_projects(aggregator, dd_run_check, insta { 'http_error': { 'networks': { - 
'1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -1348,8 +1348,8 @@ def test_networks_pagination( pytest.param( { 'http_error': { - '/networking/v2.0/quotas/1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '/networking/v2.0/quotas/6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '/networking/v2.0/quotas/1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '/networking/v2.0/quotas/6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } }, None, @@ -1362,8 +1362,8 @@ def test_networks_pagination( { 'http_error': { 'quotas': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, diff --git a/openstack_controller/tests/test_unit_nova.py b/openstack_controller/tests/test_unit_nova.py index ab81ef4cf237a..abd1b3c5d211a 100644 --- a/openstack_controller/tests/test_unit_nova.py +++ b/openstack_controller/tests/test_unit_nova.py @@ -15,7 +15,7 @@ import tests.configs as configs import tests.metrics as metrics from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog @@ -441,12 +441,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/compute/v2.1': MockResponse(status_code=500)}}, + {'http_error': 
{'/compute/v2.1': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/compute/v2.1': MockResponse(status_code=500)}}, + {'http_error': {'/compute/v2.1': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -524,7 +524,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): pytest.param( { 'http_error': { - '/compute/v2.1/limits': MockResponse(status_code=500), + '/compute/v2.1/limits': MockHTTPResponse(status_code=500), } }, None, @@ -537,8 +537,8 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): { 'http_error': { 'limits': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -842,7 +842,7 @@ def test_limits_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_compute', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/compute/v2.1/os-services': MockResponse(status_code=500)}}, + {'http_error': {'/compute/v2.1/os-services': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -850,7 +850,7 @@ def test_limits_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'services': MockResponse(status_code=500)}}, + {'http_error': {'services': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -916,7 +916,7 @@ def test_services_metrics(aggregator, check, dd_run_check, metrics): ('mock_http_get', 'connection_compute', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/compute/v2.1/flavors/detail': MockResponse(status_code=500)}}, + {'http_error': {'/compute/v2.1/flavors/detail': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -924,7 +924,7 @@ def test_services_metrics(aggregator, 
check, dd_run_check, metrics): ), pytest.param( None, - {'http_error': {'flavors': MockResponse(status_code=500)}}, + {'http_error': {'flavors': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1195,7 +1195,7 @@ def test_flavors_metrics(aggregator, check, dd_run_check): ('mock_http_get', 'connection_compute', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/compute/v2.1/os-hypervisors/detail': MockResponse(status_code=500)}}, + {'http_error': {'/compute/v2.1/os-hypervisors/detail': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1203,7 +1203,7 @@ def test_flavors_metrics(aggregator, check, dd_run_check): ), pytest.param( None, - {'http_error': {'hypervisors': MockResponse(status_code=500)}}, + {'http_error': {'hypervisors': MockHTTPResponse(status_code=500)}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1232,7 +1232,7 @@ def test_hypervisors_exception(aggregator, check, dd_run_check, mock_http_get, c ('mock_http_get', 'connection_compute', 'instance', 'api_type'), [ pytest.param( - {'http_error': {'/compute/v2.1/os-hypervisors/1/uptime': MockResponse(status_code=500)}}, + {'http_error': {'/compute/v2.1/os-hypervisors/1/uptime': MockHTTPResponse(status_code=500)}}, None, configs.REST, ApiType.REST, @@ -1240,7 +1240,7 @@ def test_hypervisors_exception(aggregator, check, dd_run_check, mock_http_get, c ), pytest.param( None, - {'http_error': {'hypervisor_uptime': {1: MockResponse(status_code=500)}}}, + {'http_error': {'hypervisor_uptime': {1: MockHTTPResponse(status_code=500)}}}, configs.SDK, ApiType.SDK, id='api sdk', @@ -1566,8 +1566,8 @@ def test_disable_diagnostics_collect_for_all_servers(aggregator, dd_run_check, i pytest.param( { 'http_error': { - '/compute/v2.1/os-quota-sets/1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '/compute/v2.1/os-quota-sets/6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + 
'/compute/v2.1/os-quota-sets/1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '/compute/v2.1/os-quota-sets/6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } }, None, @@ -1580,8 +1580,8 @@ def test_disable_diagnostics_collect_for_all_servers(aggregator, dd_run_check, i { 'http_error': { 'quota_sets': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -1692,7 +1692,7 @@ def test_quota_sets_metrics_excluding_demo_project(aggregator, check, dd_run_che pytest.param( { 'http_error': { - '/compute/v2.1/servers/detail': MockResponse(status_code=500), + '/compute/v2.1/servers/detail': MockHTTPResponse(status_code=500), } }, None, @@ -1705,8 +1705,8 @@ def test_quota_sets_metrics_excluding_demo_project(aggregator, check, dd_run_che { 'http_error': { 'servers': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -2143,7 +2143,7 @@ def test_servers_metrics_excluding_dev_servers(aggregator, check, dd_run_check, pytest.param( { 'http_error': { - '/compute/v2.1/flavors/c1': MockResponse(status_code=500), + '/compute/v2.1/flavors/c1': MockHTTPResponse(status_code=500), } }, None, @@ -2156,7 +2156,7 @@ def test_servers_metrics_excluding_dev_servers(aggregator, check, dd_run_check, pytest.param( { 'http_error': { - '/compute/v2.1/flavors/c1': MockResponse(status_code=500), + '/compute/v2.1/flavors/c1': MockHTTPResponse(status_code=500), } }, None, @@ -2171,7 +2171,7 @@ def test_servers_metrics_excluding_dev_servers(aggregator, check, dd_run_check, { 
'http_error': { 'flavors': { - 'c1': MockResponse(status_code=500), + 'c1': MockHTTPResponse(status_code=500), } } }, @@ -2186,7 +2186,7 @@ def test_servers_metrics_excluding_dev_servers(aggregator, check, dd_run_check, { 'http_error': { 'flavors': { - 'c1': MockResponse(status_code=500), + 'c1': MockHTTPResponse(status_code=500), } } }, @@ -2307,7 +2307,7 @@ def test_server_disable_flavors( pytest.param( { 'http_error': { - '/compute/v2.1/servers/5102fbbf-7156-48dc-8355-af7ab992266f/diagnostics': MockResponse( + '/compute/v2.1/servers/5102fbbf-7156-48dc-8355-af7ab992266f/diagnostics': MockHTTPResponse( status_code=500 ), } @@ -2322,7 +2322,7 @@ def test_server_disable_flavors( pytest.param( { 'http_error': { - '/compute/v2.1/servers/5102fbbf-7156-48dc-8355-af7ab992266f/diagnostics': MockResponse( + '/compute/v2.1/servers/5102fbbf-7156-48dc-8355-af7ab992266f/diagnostics': MockHTTPResponse( status_code=500 ), } @@ -2339,7 +2339,7 @@ def test_server_disable_flavors( { 'http_error': { 'server_diagnostics': { - '5102fbbf-7156-48dc-8355-af7ab992266f': MockResponse(status_code=500), + '5102fbbf-7156-48dc-8355-af7ab992266f': MockHTTPResponse(status_code=500), } } }, @@ -2354,7 +2354,7 @@ def test_server_disable_flavors( { 'http_error': { 'server_diagnostics': { - '5102fbbf-7156-48dc-8355-af7ab992266f': MockResponse(status_code=500), + '5102fbbf-7156-48dc-8355-af7ab992266f': MockHTTPResponse(status_code=500), } } }, diff --git a/openstack_controller/tests/test_unit_octavia.py b/openstack_controller/tests/test_unit_octavia.py index a685e261fc3a6..f369f34e84080 100644 --- a/openstack_controller/tests/test_unit_octavia.py +++ b/openstack_controller/tests/test_unit_octavia.py @@ -10,7 +10,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import 
remove_service_from_catalog @@ -496,12 +496,12 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, ('mock_http_get', 'instance'), [ pytest.param( - {'http_error': {'/load-balancer': MockResponse(status_code=500)}}, + {'http_error': {'/load-balancer': MockHTTPResponse(status_code=500)}}, configs.REST, id='api rest', ), pytest.param( - {'http_error': {'/load-balancer': MockResponse(status_code=500)}}, + {'http_error': {'/load-balancer': MockHTTPResponse(status_code=500)}}, configs.SDK, id='api sdk', ), @@ -579,7 +579,7 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): pytest.param( { 'http_error': { - '/load-balancer/v2/lbaas/loadbalancers': MockResponse(status_code=500), + '/load-balancer/v2/lbaas/loadbalancers': MockHTTPResponse(status_code=500), } }, None, @@ -592,8 +592,8 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): { 'http_error': { 'load_balancers': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -859,7 +859,7 @@ def test_loadbalancers_pagination( pytest.param( { 'http_error': { - '/load-balancer/v2/lbaas/listeners': MockResponse(status_code=500), + '/load-balancer/v2/lbaas/listeners': MockHTTPResponse(status_code=500), } }, None, @@ -872,8 +872,8 @@ def test_loadbalancers_pagination( { 'http_error': { 'listeners': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -1523,7 +1523,7 @@ def test_listeners_pagination( pytest.param( { 'http_error': { - '/load-balancer/v2/lbaas/pools': 
MockResponse(status_code=500), + '/load-balancer/v2/lbaas/pools': MockHTTPResponse(status_code=500), } }, None, @@ -1536,8 +1536,8 @@ def test_listeners_pagination( { 'http_error': { 'pools': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -1707,7 +1707,7 @@ def test_pools_pagination( pytest.param( { 'http_error': { - '/load-balancer/v2/lbaas/pools/d0335b34-3115-4b3b-9a1a-7e2363ebfee3/members': MockResponse( + '/load-balancer/v2/lbaas/pools/d0335b34-3115-4b3b-9a1a-7e2363ebfee3/members': MockHTTPResponse( status_code=500 ), } @@ -1722,7 +1722,7 @@ def test_pools_pagination( { 'http_error': { 'pool_members': { - 'd0335b34-3115-4b3b-9a1a-7e2363ebfee3': MockResponse(status_code=500), + 'd0335b34-3115-4b3b-9a1a-7e2363ebfee3': MockHTTPResponse(status_code=500), } } }, @@ -1882,7 +1882,7 @@ def test_pool_members_metrics(aggregator, check, dd_run_check): pytest.param( { 'http_error': { - '/load-balancer/v2/lbaas/healthmonitors': MockResponse(status_code=500), + '/load-balancer/v2/lbaas/healthmonitors': MockHTTPResponse(status_code=500), } }, None, @@ -1895,8 +1895,8 @@ def test_pool_members_metrics(aggregator, check, dd_run_check): { 'http_error': { 'health_monitors': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -2052,7 +2052,7 @@ def test_healthmonitors_metrics(aggregator, check, dd_run_check): pytest.param( { 'http_error': { - '/load-balancer/v2/lbaas/quotas': MockResponse(status_code=500), + '/load-balancer/v2/lbaas/quotas': MockHTTPResponse(status_code=500), } }, None, @@ -2065,8 
+2065,8 @@ def test_healthmonitors_metrics(aggregator, check, dd_run_check): { 'http_error': { 'quotas': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, @@ -2343,7 +2343,7 @@ def test_quotas_metrics(aggregator, check, dd_run_check): pytest.param( { 'http_error': { - '/load-balancer/v2/octavia/amphorae': MockResponse(status_code=500), + '/load-balancer/v2/octavia/amphorae': MockHTTPResponse(status_code=500), } }, None, @@ -2356,8 +2356,8 @@ def test_quotas_metrics(aggregator, check, dd_run_check): { 'http_error': { 'amphorae': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, diff --git a/openstack_controller/tests/test_unit_swift.py b/openstack_controller/tests/test_unit_swift.py index e58f9c17ed862..d1ce57f296cd6 100644 --- a/openstack_controller/tests/test_unit_swift.py +++ b/openstack_controller/tests/test_unit_swift.py @@ -11,7 +11,7 @@ import tests.configs as configs from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.openstack_controller.api.type import ApiType from tests.common import remove_service_from_catalog from tests.metrics import ( @@ -106,8 +106,8 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, pytest.param( { 'http_error': { - '/v1/AUTH_1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '/v1/AUTH_6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + 
'/v1/AUTH_1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '/v1/AUTH_6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } }, configs.REST, @@ -116,8 +116,8 @@ def test_not_in_catalog(aggregator, check, dd_run_check, caplog, mock_http_post, pytest.param( { 'http_error': { - '/v1/AUTH_1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '/v1/AUTH_6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '/v1/AUTH_1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '/v1/AUTH_6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } }, configs.SDK, @@ -197,8 +197,8 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): pytest.param( { 'http_error': { - '/v1/AUTH_1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '/v1/AUTH_6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '/v1/AUTH_1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '/v1/AUTH_6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), }, }, None, @@ -211,8 +211,8 @@ def test_response_time(aggregator, check, dd_run_check, mock_http_get): { 'http_error': { 'containers': { - '1e6e233e637d4d55a50a62b63398ad15': MockResponse(status_code=500), - '6e39099cccde4f809b003d9e0dd09304': MockResponse(status_code=500), + '1e6e233e637d4d55a50a62b63398ad15': MockHTTPResponse(status_code=500), + '6e39099cccde4f809b003d9e0dd09304': MockHTTPResponse(status_code=500), } } }, diff --git a/php_fpm/tests/test_unit.py b/php_fpm/tests/test_unit.py index b0f46d613c077..c70af599f53a4 100644 --- a/php_fpm/tests/test_unit.py +++ b/php_fpm/tests/test_unit.py @@ -33,50 +33,44 @@ def test_bad_ping(aggregator, dd_run_check): aggregator.all_metrics_asserted() -def test_should_not_retry(check, instance): +def test_should_not_retry(check, instance, mock_http): """ backoff only works when response code is 503, otherwise the error should bubble up """ - r = 
mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.side_effect = FooException("Generic http error here") - with pytest.raises(FooException): - check._process_status(instance['status_url'], [], None, False) + mock_http.get.side_effect = FooException("Generic http error here") + with pytest.raises(FooException): + check._process_status(instance['status_url'], [], None, False) -def test_should_bail_out(check, instance): +def test_should_bail_out(check, instance, mock_http): """ backoff should give up after 3 attempts """ - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - attrs = {'raise_for_status.side_effect': FooException()} - r.get.side_effect = [ - mock.MagicMock(status_code=503, **attrs), - mock.MagicMock(status_code=503, **attrs), - mock.MagicMock(status_code=503, **attrs), - mock.MagicMock(status_code=200), - ] - with pytest.raises(FooException): - check._process_status(instance['status_url'], [], None, False) - - -def test_backoff_success(check, instance, aggregator, payload): + attrs = {'raise_for_status.side_effect': FooException()} + mock_http.get.side_effect = [ + mock.MagicMock(status_code=503, **attrs), + mock.MagicMock(status_code=503, **attrs), + mock.MagicMock(status_code=503, **attrs), + mock.MagicMock(status_code=200), + ] + with pytest.raises(FooException): + check._process_status(instance['status_url'], [], None, False) + + +def test_backoff_success(check, instance, aggregator, payload, mock_http): """ Success after 2 failed attempts """ instance['ping_url'] = None - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - attrs = {'json.return_value': payload} - r.get.side_effect = [ - mock.MagicMock(status_code=503), - mock.MagicMock(status_code=503), - mock.MagicMock(status_code=200, **attrs), - ] - pool_name = check._process_status(instance['status_url'], [], None, False) - 
assert pool_name == 'www' + attrs = {'json.return_value': payload} + mock_http.get.side_effect = [ + mock.MagicMock(status_code=503), + mock.MagicMock(status_code=503), + mock.MagicMock(status_code=200, **attrs), + ] + pool_name = check._process_status(instance['status_url'], [], None, False) + assert pool_name == 'www' @pytest.mark.parametrize( @@ -101,25 +95,14 @@ def test_backoff_success(check, instance, aggregator, payload): ), ], ) -def test_config(test_case, extra_config, expected_http_kwargs, dd_run_check): +def test_config(test_case, extra_config, expected_http_kwargs): instance = {'ping_url': 'http://foo:9001/ping'} instance.update(extra_config) check = PHPFPMCheck('php_fpm', {}, instances=[instance]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - dd_run_check(check) - - http_kwargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_kwargs.update(expected_http_kwargs) - r.get.assert_called_with('http://foo:9001/ping', **http_kwargs) + for key, value in expected_http_kwargs.items(): + if key == 'headers': + for h_key, h_value in value.items(): + assert check.http.get_header(h_key) == h_value + else: + assert check.http.options[key] == value diff --git a/powerdns_recursor/tests/test_metadata.py b/powerdns_recursor/tests/test_metadata.py index 36ff99eebd981..e5f82358b469d 100644 --- a/powerdns_recursor/tests/test_metadata.py +++ b/powerdns_recursor/tests/test_metadata.py @@ -6,7 +6,7 @@ import pytest import requests -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.powerdns_recursor import PowerDNSRecursorCheck from . 
import common @@ -30,13 +30,13 @@ def test_metadata_unit(datadog_agent): check.log.debug.assert_called_with('Error collecting PowerDNS Recursor version: %s', '') datadog_agent.reset() - with mock.patch('requests.Session.get', return_value=MockResponse()): + with mock.patch('requests.Session.get', return_value=MockHTTPResponse()): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with("Couldn't find the PowerDNS Recursor Server version header") datadog_agent.reset() - with mock.patch('requests.Session.get', return_value=MockResponse(headers={'Server': 'wrong_stuff'})): + with mock.patch('requests.Session.get', return_value=MockHTTPResponse(headers={'Server': 'wrong_stuff'})): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with( diff --git a/prometheus/tests/conftest.py b/prometheus/tests/conftest.py index 853e8f5e99efd..de8ddb13c8174 100644 --- a/prometheus/tests/conftest.py +++ b/prometheus/tests/conftest.py @@ -4,11 +4,11 @@ import os -import mock import pytest from prometheus_client import CollectorRegistry, Counter, Gauge, generate_latest from datadog_checks.base import ensure_unicode +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run, get_docker_hostname HERE = os.path.dirname(os.path.abspath(__file__)) @@ -50,7 +50,7 @@ def dd_environment(e2e_instance): @pytest.fixture -def poll_mock(): +def poll_mock(mock_prometheus_http): registry = CollectorRegistry() # pylint: disable=E1123,E1101 g1 = Gauge('metric1', 'processor usage', ['matched_label', 'node', 'flavor'], registry=registry) @@ -62,13 +62,6 @@ def poll_mock(): g3 = Gauge('metric3', 'memory usage', ['matched_label', 'node', 'timestamp'], registry=registry) g3.labels(matched_label="foobar", node="host2", timestamp="456").set(float('inf')) - poll_mock_patch = mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - 
status_code=200, - iter_lines=lambda **kwargs: ensure_unicode(generate_latest(registry)).split("\n"), - headers={'Content-Type': "text/plain"}, - ), - ) - with poll_mock_patch: - yield + content = ensure_unicode(generate_latest(registry)) + mock_prometheus_http.get.return_value = MockHTTPResponse(content=content, headers={'Content-Type': 'text/plain'}) + yield diff --git a/proxmox/datadog_checks/proxmox/check.py b/proxmox/datadog_checks/proxmox/check.py index 60c91e2361e43..463860f938151 100644 --- a/proxmox/datadog_checks/proxmox/check.py +++ b/proxmox/datadog_checks/proxmox/check.py @@ -3,10 +3,19 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import re +from json import JSONDecodeError as StdJSONDecodeError from requests.exceptions import ConnectionError, HTTPError, InvalidURL, JSONDecodeError, Timeout from datadog_checks.base import AgentCheck +from datadog_checks.base.utils.http_exceptions import ( + HTTPConnectionError as AgentHTTPConnectionError, +) +from datadog_checks.base.utils.http_exceptions import ( + HTTPInvalidURLError, + HTTPStatusError, + HTTPTimeoutError, +) from datadog_checks.base.utils.time import get_current_datetime, get_timestamp from datadog_checks.proxmox.config_models import ConfigMixin @@ -146,7 +155,19 @@ def _get_vm_hostname(self, vm_id, vm_name, node): hostname_response = self.http.get(url) hostname_json = hostname_response.json() hostname = hostname_json.get("data", {}).get("result", {}).get("host-name", vm_name) - except (HTTPError, InvalidURL, ConnectionError, Timeout, JSONDecodeError, AttributeError) as e: + except ( + HTTPError, + InvalidURL, + ConnectionError, + Timeout, + JSONDecodeError, + StdJSONDecodeError, + AttributeError, + HTTPStatusError, + HTTPInvalidURLError, + AgentHTTPConnectionError, + HTTPTimeoutError, + ) as e: self.log.info( "Failed to get hostname for vm %s on node %s; endpoint: %s; %s", vm_id, @@ -386,7 +407,18 @@ def check(self, _): self.set_metadata('version', version) self.gauge("api.up", 1, 
tags=self.base_tags + ['proxmox_status:up']) - except (HTTPError, InvalidURL, ConnectionError, Timeout, JSONDecodeError) as e: + except ( + HTTPError, + InvalidURL, + ConnectionError, + Timeout, + JSONDecodeError, + StdJSONDecodeError, + HTTPStatusError, + HTTPInvalidURLError, + AgentHTTPConnectionError, + HTTPTimeoutError, + ) as e: self.log.error( "Encountered an Exception when hitting the Proxmox API %s: %s", self.config.proxmox_server, e ) diff --git a/proxmox/tests/conftest.py b/proxmox/tests/conftest.py index 6d0411b86f136..d54d7b7f0cfd1 100644 --- a/proxmox/tests/conftest.py +++ b/proxmox/tests/conftest.py @@ -7,10 +7,10 @@ from pathlib import Path from urllib.parse import urlparse -import mock import pytest -import requests +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.fs import get_here from .common import INSTANCE @@ -83,17 +83,13 @@ def call(method, url, file='response', headers=None, params=None): response = mock_responses(method, url, file=file, headers=headers, params=params) if response is not None: return response - http_response = requests.models.Response() - http_response.status_code = 404 - http_response.reason = "Not Found" - http_response.url = url - raise requests.exceptions.HTTPError(response=http_response) + raise HTTPStatusError('404 Client Error', response=MockHTTPResponse(status_code=404, url=url)) yield call @pytest.fixture -def mock_http_get(request, monkeypatch, mock_http_call): +def mock_http_get(request, mock_http, mock_http_call): param = request.param if hasattr(request, 'param') and request.param is not None else {} http_error = param.pop('http_error', {}) @@ -102,12 +98,10 @@ def get(url, *args, **kwargs): url = get_url_path(url) if http_error and url in http_error: return http_error[url] - mock_status_code = mock.MagicMock(return_value=200) headers = kwargs.get('headers') params = kwargs.get('params') - mock_json 
= mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) - return mock.MagicMock(json=mock_json, status_code=mock_status_code) + json_data = mock_http_call(method, url, headers=headers, params=params) + return MockHTTPResponse(json_data=json_data) - mock_get = mock.MagicMock(side_effect=get) - monkeypatch.setattr('requests.Session.get', mock_get) - return mock_get + mock_http.get.side_effect = get + return mock_http.get diff --git a/proxmox/tests/test_unit.py b/proxmox/tests/test_unit.py index 8bec8ae7d26b0..23802f60b76f5 100644 --- a/proxmox/tests/test_unit.py +++ b/proxmox/tests/test_unit.py @@ -9,7 +9,7 @@ import mock import pytest -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.proxmox import ProxmoxCheck @@ -58,11 +58,11 @@ def test_no_tags(dd_run_check, aggregator, instance): ('mock_http_get'), [ pytest.param( - {'http_error': {'/api2/json/version': MockResponse(status_code=500)}}, + {'http_error': {'/api2/json/version': MockHTTPResponse(status_code=500)}}, id='500', ), pytest.param( - {'http_error': {'/api2/json/version': MockResponse(status_code=404)}}, + {'http_error': {'/api2/json/version': MockHTTPResponse(status_code=404)}}, id='404', ), ], @@ -71,7 +71,7 @@ def test_no_tags(dd_run_check, aggregator, instance): @pytest.mark.usefixtures('mock_http_get') def test_api_down(dd_run_check, aggregator, instance): check = ProxmoxCheck('proxmox', {}, [instance]) - with pytest.raises(Exception, match=r'requests.exceptions.HTTPError'): + with pytest.raises(Exception, match=r'HTTPStatusError'): dd_run_check(check) aggregator.assert_metric( @@ -270,7 +270,7 @@ def test_resource_up_metrics(dd_run_check, aggregator, instance): pytest.param( { 'http_error': { - '/api2/json/nodes/ip-122-82-3-112/qemu/100/agent/get-host-name': MockResponse(status_code=500) + 
'/api2/json/nodes/ip-122-82-3-112/qemu/100/agent/get-host-name': MockHTTPResponse(status_code=500) } }, id='500', @@ -278,7 +278,7 @@ def test_resource_up_metrics(dd_run_check, aggregator, instance): pytest.param( { 'http_error': { - '/api2/json/nodes/ip-122-82-3-112/qemu/100/agent/get-host-name': MockResponse(status_code=404) + '/api2/json/nodes/ip-122-82-3-112/qemu/100/agent/get-host-name': MockHTTPResponse(status_code=404) } }, id='404', @@ -286,7 +286,7 @@ def test_resource_up_metrics(dd_run_check, aggregator, instance): pytest.param( { 'http_error': { - '/api2/json/nodes/ip-122-82-3-112/qemu/100/agent/get-host-name': MockResponse( + '/api2/json/nodes/ip-122-82-3-112/qemu/100/agent/get-host-name': MockHTTPResponse( status_code=200, json_data={"data": None, "message": "No QEMU guest agent configured\n"} ) } @@ -489,7 +489,7 @@ def test_perf_metrics(dd_run_check, aggregator, instance): ('mock_http_get'), [ pytest.param( - {'http_error': {'/api2/json/cluster/metrics/export': MockResponse(status_code=501)}}, + {'http_error': {'/api2/json/cluster/metrics/export': MockHTTPResponse(status_code=501)}}, id='501', ), ], diff --git a/quarkus/tests/test_unit.py b/quarkus/tests/test_unit.py index 9f96137aa2e8c..b1df052419d7d 100644 --- a/quarkus/tests/test_unit.py +++ b/quarkus/tests/test_unit.py @@ -83,7 +83,7 @@ def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggrega mock_http_response(status_code=404) check = QuarkusCheck('quarkus', {}, [instance]) # When - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match="HTTPStatusError"): dd_run_check(check) # Then aggregator.assert_service_check('quarkus.openmetrics.health', QuarkusCheck.CRITICAL) diff --git a/rabbitmq/tests/test_openmetrics.py b/rabbitmq/tests/test_openmetrics.py index ce916324e5891..2000235d73d61 100644 --- a/rabbitmq/tests/test_openmetrics.py +++ b/rabbitmq/tests/test_openmetrics.py @@ -10,7 +10,7 @@ from datadog_checks.base.errors 
import ConfigurationError from datadog_checks.base.types import ServiceCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.rabbitmq import RabbitMQ @@ -203,7 +203,7 @@ def mock_http_responses(url, **_params): ): 'detailed-only-metrics.txt', }[parsed.path + (f"?{parsed.query}" if parsed.query else "")] with open(os.path.join(OM_RESPONSE_FIXTURES, fname)) as fh: - return MockResponse(content=fh.read()) + return MockHTTPResponse(content=fh.read()) @pytest.mark.parametrize( @@ -352,6 +352,6 @@ def test_config(prom_plugin_settings, err): def test_service_check_critical(aggregator, dd_run_check, mock_http_response): mock_http_response(status_code=404) check = _rmq_om_check({'url': 'http://fail'}) - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match="HTTPStatusError"): dd_run_check(check) aggregator.assert_service_check('rabbitmq.openmetrics.health', status=check.CRITICAL) diff --git a/rabbitmq/tests/test_unit.py b/rabbitmq/tests/test_unit.py index 7da42ea0dc88e..7b61718718b4f 100644 --- a/rabbitmq/tests/test_unit.py +++ b/rabbitmq/tests/test_unit.py @@ -24,14 +24,12 @@ pytestmark = [pytest.mark.unit, common.requires_management] -def test__get_data(check): - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.side_effect = [requests.exceptions.HTTPError, ValueError] - with pytest.raises(RabbitMQException): - check._get_data('') - with pytest.raises(RabbitMQException): - check._get_data('') +def test__get_data(check, mock_http): + mock_http.get.side_effect = [requests.exceptions.HTTPError, ValueError] + with pytest.raises(RabbitMQException): + check._get_data('') + with pytest.raises(RabbitMQException): + check._get_data('') def test_status_check(check, aggregator): @@ -136,24 +134,8 @@ def 
test_config(check, test_case, extra_config, expected_http_kwargs): config.update(extra_config) check = RabbitMQ('rabbitmq', {}, instances=[config]) - r = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): - r.get.return_value = mock.MagicMock(status_code=200) - - check.check(config) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - - r.get.assert_called_with('http://localhost:15672/api/connections', **http_wargs) + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value def test_nodes(aggregator, check): diff --git a/ray/tests/common.py b/ray/tests/common.py index 19b0c9f911678..d51a4faaf0061 100644 --- a/ray/tests/common.py +++ b/ray/tests/common.py @@ -4,8 +4,8 @@ import os +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import get_docker_hostname, get_here -from datadog_checks.dev.http import MockResponse HERE = get_here() @@ -333,4 +333,4 @@ def mock_http_responses(url, **_params): raise Exception(f"url `{url}` not registered") with open(os.path.join(HERE, 'fixtures', metrics_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/ray/tests/test_unit.py b/ray/tests/test_unit.py index f2e7725b5287f..4be5c8aa13ba9 100644 --- a/ray/tests/test_unit.py +++ b/ray/tests/test_unit.py @@ -16,8 +16,8 @@ pytest.param(MOCKED_WORKER_INSTANCE, WORKER_METRICS, id='worker'), ], ) -def test_check(dd_run_check, aggregator, mocker, check, instance, metrics): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check(dd_run_check, aggregator, mock_http, check, instance, metrics): + mock_http.get.side_effect = mock_http_responses dd_run_check(check(instance)) for expected_metric in metrics: @@ -30,10 
+30,10 @@ def test_check(dd_run_check, aggregator, mocker, check, instance, metrics): assert len(aggregator.service_check_names) == 1 -def test_invalid_url(dd_run_check, aggregator, check, mocked_head_instance, mocker): +def test_invalid_url(dd_run_check, aggregator, check, mocked_head_instance, mock_http): mocked_head_instance["openmetrics_endpoint"] = "http://unknowwn" - mocker.patch("requests.Session.get", wraps=mock_http_responses) + mock_http.get.side_effect = mock_http_responses with pytest.raises(Exception): dd_run_check(check(mocked_head_instance)) diff --git a/scylla/tests/conftest.py b/scylla/tests/conftest.py index 5b28e13609d02..52338e368df3f 100644 --- a/scylla/tests/conftest.py +++ b/scylla/tests/conftest.py @@ -3,9 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os -import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run, get_docker_hostname, get_here HERE = get_here() @@ -34,7 +34,7 @@ def instance(): @pytest.fixture() -def mock_db_data(): +def mock_db_data(mock_openmetrics_http): if os.environ['SCYLLA_VERSION'].startswith('5.'): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'scylla_5_metrics.txt') elif os.environ['SCYLLA_VERSION'].startswith('3.3'): @@ -44,12 +44,5 @@ def mock_db_data(): with open(f_name, 'r') as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, - iter_lines=lambda **kwargs: text_data.split("\n"), - headers={'Content-Type': "text/plain"}, - ), - ): - yield + mock_openmetrics_http.get.return_value = MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) + yield diff --git a/sonarqube/tests/test_unit.py b/sonarqube/tests/test_unit.py index 3582574044079..a469dfafe6c2b 100644 --- a/sonarqube/tests/test_unit.py +++ b/sonarqube/tests/test_unit.py @@ -6,7 +6,7 @@ import mock import requests -from datadog_checks.dev.http import 
MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from .common import HERE from .metrics import WEB_METRICS @@ -27,10 +27,10 @@ def test_service_check_critical(aggregator, dd_run_check, sonarqube_check, web_i def test_service_check_ok_version_empty(aggregator, dd_run_check, sonarqube_check, web_instance): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version_empty')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version_empty')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance) global_tags = ['endpoint:{}'.format(web_instance['web_endpoint'])] @@ -44,10 +44,10 @@ def test_service_check_ok_version_empty(aggregator, dd_run_check, sonarqube_chec def test_service_check_ok(aggregator, dd_run_check, sonarqube_check, web_instance): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 
'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance) global_tags = ['endpoint:{}'.format(web_instance['web_endpoint'])] @@ -61,10 +61,10 @@ def test_service_check_ok(aggregator, dd_run_check, sonarqube_check, web_instanc def test_service_check_ok_and_config_none(aggregator, dd_run_check, sonarqube_check, web_instance_config_none): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_config_none) global_tags = ['endpoint:{}'.format(web_instance_config_none['web_endpoint'])] @@ -80,10 +80,10 @@ def test_service_check_ok_and_exclude_metrics( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + 
MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_and_exclude_metrics) global_tags = ['endpoint:{}'.format(web_instance_and_exclude_metrics['web_endpoint'])] @@ -99,11 +99,11 @@ def test_service_check_ok_with_autodiscovery_only_include( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_with_autodiscovery_only_include) global_tags = ['endpoint:{}'.format(web_instance_with_autodiscovery_only_include['web_endpoint'])] @@ -119,10 +119,10 @@ def test_service_check_ok_with_autodiscovery_only_include_metrics_empty( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - 
MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_empty')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component_empty')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_empty')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component_empty')), ] check = sonarqube_check(web_instance_with_autodiscovery_only_include) global_tags = ['endpoint:{}'.format(web_instance_with_autodiscovery_only_include['web_endpoint'])] @@ -138,12 +138,12 @@ def test_service_check_ok_with_autodiscovery_include_all_and_exclude( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p2')), + 
MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_with_autodiscovery_include_all_and_exclude) global_tags = ['endpoint:{}'.format(web_instance_with_autodiscovery_include_all_and_exclude['web_endpoint'])] @@ -162,12 +162,12 @@ def test_service_check_ok_with_autodiscovery_include_all_and_limit( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search_with_tmp_p2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_with_autodiscovery_include_all_and_limit) global_tags = ['endpoint:{}'.format(web_instance_with_autodiscovery_include_all_and_limit['web_endpoint'])] @@ -184,12 +184,12 @@ def test_service_check_ok_with_component_and_autodiscovery( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 
'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_with_component_and_autodiscovery) global_tags = ['endpoint:{}'.format(web_instance_with_component_and_autodiscovery['web_endpoint'])] @@ -206,11 +206,11 @@ def test_service_check_ok_with_autodiscovery_config_none( ): with mock.patch('datadog_checks.sonarqube.check.SonarqubeCheck.http') as mock_http: mock_http.get.side_effect = [ - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_2')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), - MockResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'version')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'metrics_search_p_1')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 
'metrics_search_p_2')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'components_search')), + MockHTTPResponse(file_path=os.path.join(HERE, 'api_responses', 'measures_component')), ] check = sonarqube_check(web_instance_with_autodiscovery_config_none) global_tags = ['endpoint:{}'.format(web_instance_with_autodiscovery_config_none['web_endpoint'])] diff --git a/sonatype_nexus/datadog_checks/sonatype_nexus/errors.py b/sonatype_nexus/datadog_checks/sonatype_nexus/errors.py index 785ec38d9a6bc..322eee7eed673 100644 --- a/sonatype_nexus/datadog_checks/sonatype_nexus/errors.py +++ b/sonatype_nexus/datadog_checks/sonatype_nexus/errors.py @@ -6,6 +6,8 @@ import requests +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError + class APIError(Exception): default_message = "An unknown API error occurred." @@ -80,7 +82,7 @@ def wrapper(self, *args: Any, **kwargs: Any) -> Any: return response - except requests.exceptions.Timeout as ex: + except (requests.exceptions.Timeout, HTTPTimeoutError) as ex: self.log.error("TimeoutError: Timeout while requesting data from the API.") raise APIError("Timeout while requesting data from the API.") from ex diff --git a/spark/datadog_checks/spark/spark.py b/spark/datadog_checks/spark/spark.py index ef66975b4fdc9..222423e79edb8 100644 --- a/spark/datadog_checks/spark/spark.py +++ b/spark/datadog_checks/spark/spark.py @@ -1,6 +1,7 @@ # (C) Datadog, Inc. 
2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from json import JSONDecodeError as StdJSONDecodeError from urllib.parse import urljoin, urlparse, urlsplit, urlunsplit from bs4 import BeautifulSoup @@ -8,6 +9,14 @@ from simplejson import JSONDecodeError from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative +from datadog_checks.base.utils.http_exceptions import ( + HTTPConnectionError as AgentHTTPConnectionError, +) +from datadog_checks.base.utils.http_exceptions import ( + HTTPInvalidURLError, + HTTPStatusError, + HTTPTimeoutError, +) from .constants import ( APPLICATION_STATES, @@ -442,12 +451,12 @@ def _describe_app(self, property, running_apps, addl_tags): ) if response is None: continue - except HTTPError: + except (HTTPError, HTTPStatusError): self.log.debug("Got an error collecting %s", property, exc_info=True) continue try: yield (response.json(), [f'app_name:{app_name}'] + addl_tags) - except JSONDecodeError: + except (JSONDecodeError, StdJSONDecodeError): self.log.debug( 'Skipping metrics for %s from app %s due to unparsable JSON payload.', property, app_name ) @@ -583,7 +592,7 @@ def _spark_structured_streams_metrics(self, running_apps, addl_tags): ) self._set_metric(metric_name, submission_type, value, tags=tags) - except HTTPError as e: + except (HTTPError, HTTPStatusError) as e: self.log.debug("No structured streaming metrics to collect from app %s. 
%s", app_name, e, exc_info=True) pass @@ -666,7 +675,7 @@ def _rest_request(self, url, object_path, service_name, tags, *args, **kwargs): response = self.http.get(proxy_redirect_url, cookies=self.proxy_redirect_cookies) response.raise_for_status() - except Timeout as e: + except (Timeout, HTTPTimeoutError) as e: self.service_check( service_name, AgentCheck.CRITICAL, @@ -675,8 +684,17 @@ def _rest_request(self, url, object_path, service_name, tags, *args, **kwargs): ) raise - except (HTTPError, InvalidURL, ConnectionError) as e: - if isinstance(e, ConnectionError) and self._should_suppress_connection_error(e, tags): + except ( + HTTPError, + InvalidURL, + ConnectionError, + HTTPStatusError, + HTTPInvalidURLError, + AgentHTTPConnectionError, + ) as e: + if isinstance(e, (ConnectionError, AgentHTTPConnectionError)) and self._should_suppress_connection_error( + e, tags + ): return None self.service_check( @@ -706,7 +724,7 @@ def _rest_request_to_json(self, address, object_path, service_name, tags, *args, try: response_json = response.json() - except JSONDecodeError as e: + except (JSONDecodeError, StdJSONDecodeError) as e: response_text = response.text.strip() if response_text and 'spark is starting up' in response_text.lower(): # Handle startup message based on retry configuration diff --git a/spark/tests/test_spark.py b/spark/tests/test_spark.py index 43bae42035523..6735156576c36 100644 --- a/spark/tests/test_spark.py +++ b/spark/tests/test_spark.py @@ -14,7 +14,7 @@ import urllib3 from requests import ConnectionError, RequestException -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.spark import SparkCheck @@ -161,13 +161,13 @@ def __hash__(self): CERTIFICATE_DIR = os.path.join(os.path.dirname(__file__), 'certificate') DEFAULT_RESPONSES = { - '/jobs': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')), - 
'/stages': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')), - '/executors': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')), - '/storage/rdd': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')), - '/streaming/statistics': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')), - '/metrics/json': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')), - '/api/v1/version': MockResponse(file_path=os.path.join(FIXTURE_DIR, 'version')), + '/jobs': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')), + '/stages': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')), + '/executors': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')), + '/storage/rdd': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')), + '/streaming/statistics': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')), + '/metrics/json': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')), + '/api/v1/version': MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'version')), } @@ -182,9 +182,9 @@ def yarn_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == YARN_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'yarn_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'yarn_apps')) elif arg_url == YARN_SPARK_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) return get_default_mock(url) @@ -203,100 +203,100 @@ def mesos_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == MESOS_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'mesos_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'mesos_apps')) elif arg_url == MESOS_SPARK_APP_URL: - return 
MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) elif arg_url == MESOS_SPARK_JOB_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) elif arg_url == MESOS_SPARK_STAGE_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) elif arg_url == MESOS_SPARK_EXECUTOR_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) elif arg_url == MESOS_SPARK_RDD_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) elif arg_url == MESOS_SPARK_STREAMING_STATISTICS_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) elif arg_url == MESOS_SPARK_METRICS_JSON_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) def driver_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == DRIVER_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) elif arg_url == DRIVER_SPARK_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) elif arg_url == DRIVER_SPARK_JOB_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) elif arg_url == 
DRIVER_SPARK_STAGE_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) elif arg_url == DRIVER_SPARK_EXECUTOR_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) elif arg_url == DRIVER_SPARK_RDD_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) elif arg_url == DRIVER_SPARK_STREAMING_STATISTICS_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) elif arg_url == DRIVER_SPARK_METRICS_JSON_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) def standalone_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == STANDALONE_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_apps')) elif arg_url == STANDALONE_APP_HTML_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_app')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_app')) elif arg_url == STANDALONE_SPARK_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps')) elif arg_url == STANDALONE_SPARK_JOB_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) elif arg_url == STANDALONE_SPARK_STAGE_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 
'stage_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) elif arg_url == STANDALONE_SPARK_EXECUTOR_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) elif arg_url == STANDALONE_SPARK_RDD_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) elif arg_url == STANDALONE_SPARK_STREAMING_STATISTICS_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) elif arg_url == STANDALONE_SPARK_METRICS_JSON_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) def standalone_requests_pre20_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == STANDALONE_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_apps')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_apps')) elif arg_url == STANDALONE_APP_HTML_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_app')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_standalone_app')) elif arg_url == STANDALONE_SPARK_APP_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps_pre20')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'spark_apps_pre20')) elif arg_url == STANDALONE_SPARK_JOB_URL: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) elif arg_url == STANDALONE_SPARK_STAGE_URL: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) elif arg_url == STANDALONE_SPARK_EXECUTOR_URL: - return MockResponse(status_code=404) + return 
MockHTTPResponse(status_code=404) elif arg_url == STANDALONE_SPARK_RDD_URL: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) elif arg_url == STANDALONE_SPARK_STREAMING_STATISTICS_URL: - return MockResponse(status_code=404) + return MockHTTPResponse(status_code=404) elif arg_url == STANDALONE_SPARK_JOB_URL_PRE20: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'job_metrics')) elif arg_url == STANDALONE_SPARK_STAGE_URL_PRE20: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'stage_metrics')) elif arg_url == STANDALONE_SPARK_EXECUTOR_URL_PRE20: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'executor_metrics')) elif arg_url == STANDALONE_SPARK_RDD_URL_PRE20: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'rdd_metrics')) elif arg_url == STANDALONE_SPARK_STREAMING_STATISTICS_URL_PRE20: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'streaming_statistics')) elif arg_url == VERSION_PATH: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'version')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'version')) elif arg_url == STANDALONE_SPARK_METRICS_JSON_URL_PRE20: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) def proxy_with_warning_page_mock(session, url, *args, **kwargs): @@ -314,7 +314,7 @@ def proxy_with_warning_page_mock(session, url, *args, **kwargs): url_parts[4] = urlencode(query) with open(os.path.join(FIXTURE_DIR, 'html_warning_page'), 'r') as f: body = 
f.read().replace('$REDIRECT_URL$', urlunparse(url_parts)) - return MockResponse(body, cookies={'proxy_cookie': 'foo'}) + return MockHTTPResponse(body, cookies={'proxy_cookie': 'foo'}) CHECK_NAME = 'spark' @@ -1190,10 +1190,10 @@ def test_do_not_crash_on_version_collection_failure(): @pytest.mark.unit def test_driver_startup_message_default_retries(aggregator, caplog): """Default behavior (startup_wait_retries=3): retry 3 times then raise.""" - from simplejson import JSONDecodeError + from json import JSONDecodeError check = SparkCheck('spark', {}, [DRIVER_CONFIG]) - response = MockResponse(content="Spark is starting up. Please wait a while until it's ready.") + response = MockHTTPResponse(content="Spark is starting up. Please wait a while until it's ready.") with caplog.at_level(logging.DEBUG): with mock.patch.object(check, '_rest_request', return_value=response): @@ -1222,12 +1222,12 @@ def test_driver_startup_message_default_retries(aggregator, caplog): @pytest.mark.parametrize("retries_value", [0, -1, -5]) def test_driver_startup_message_disabled(aggregator, retries_value): """When startup_wait_retries<=0, treat startup messages as errors immediately.""" - from simplejson import JSONDecodeError + from json import JSONDecodeError config = DRIVER_CONFIG.copy() config['startup_wait_retries'] = retries_value check = SparkCheck('spark', {}, [config]) - response = MockResponse(content="Spark is starting up. Please wait a while until it's ready.") + response = MockHTTPResponse(content="Spark is starting up. 
Please wait a while until it's ready.") with mock.patch.object(check, '_rest_request', return_value=response): with pytest.raises(JSONDecodeError): @@ -1243,12 +1243,12 @@ def test_driver_startup_message_disabled(aggregator, retries_value): @pytest.mark.unit def test_driver_startup_message_limited_retries(aggregator, caplog): """When startup_wait_retries>0, retry N times then raise.""" - from simplejson import JSONDecodeError + from json import JSONDecodeError config = DRIVER_CONFIG.copy() config['startup_wait_retries'] = 3 check = SparkCheck('spark', {}, [config]) - response = MockResponse(content="Spark is starting up. Please wait a while until it's ready.") + response = MockHTTPResponse(content="Spark is starting up. Please wait a while until it's ready.") with caplog.at_level(logging.DEBUG): with mock.patch.object(check, '_rest_request', return_value=response): @@ -1280,8 +1280,8 @@ def test_driver_startup_retry_counter_resets_on_success(caplog): config = DRIVER_CONFIG.copy() config['startup_wait_retries'] = 2 check = SparkCheck('spark', {}, [config]) - startup_response = MockResponse(content="Spark is starting up. Please wait a while until it's ready.") - success_response = MockResponse(json_data=[{"id": "app_001", "name": "TestApp"}]) + startup_response = MockHTTPResponse(content="Spark is starting up. 
Please wait a while until it's ready.") + success_response = MockHTTPResponse(json_data=[{"id": "app_001", "name": "TestApp"}]) with caplog.at_level(logging.DEBUG): with mock.patch.object(check, '_rest_request', return_value=startup_response): @@ -1359,7 +1359,7 @@ def test_do_not_crash_on_single_app_failure(): ids=["driver", "yarn", "mesos", "standalone", "standalone_pre_20"], ) def test_no_running_apps(aggregator, dd_run_check, instance, service_check, caplog): - with mock.patch('requests.Session.get', return_value=MockResponse("{}")): + with mock.patch('requests.Session.get', return_value=MockHTTPResponse("{}")): with caplog.at_level(logging.WARNING): dd_run_check(SparkCheck('spark', {}, [instance])) @@ -1378,9 +1378,11 @@ def test_no_running_apps(aggregator, dd_run_check, instance, service_check, capl @pytest.mark.parametrize( "mock_response", [ - pytest.param(MockResponse(content=""), id="Invalid JSON"), # this triggers json parsing error, - pytest.param(MockResponse(status_code=404), id="property not found"), - pytest.param(MockResponse(status_code=500), id="Spark internal server error"), # reported by users in the wild + pytest.param(MockHTTPResponse(content=""), id="Invalid JSON"), # this triggers json parsing error, + pytest.param(MockHTTPResponse(status_code=404), id="property not found"), + pytest.param( + MockHTTPResponse(status_code=500), id="Spark internal server error" + ), # reported by users in the wild ], ) @pytest.mark.parametrize( @@ -1412,7 +1414,7 @@ def get_without_json(session, url, *args, **kwargs): if arg_url == property_url: return mock_response elif arg_url == YARN_SPARK_APP_URL: - return MockResponse( + return MockHTTPResponse( json_data=[ { "id": SPARK_APP_ID, diff --git a/squid/tests/test_squid.py b/squid/tests/test_squid.py index 55a40cbe7c835..d0f8f7e9c9cf4 100644 --- a/squid/tests/test_squid.py +++ b/squid/tests/test_squid.py @@ -2,9 +2,10 @@ # All rights reserved # Licensed under Simplified BSD License (see LICENSE) -import mock 
import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse + from . import common @@ -78,12 +79,11 @@ def test_check_ok(aggregator, check, instance): ], ) @pytest.mark.usefixtures("dd_environment") -def test_version_metadata(check, instance, datadog_agent, raw_version, version_metadata, count): - with mock.patch('datadog_checks.base.utils.http.requests.Session.get') as g: - g.return_value.headers = {'Server': raw_version} +def test_version_metadata(check, instance, datadog_agent, mock_http, raw_version, version_metadata, count): + mock_http.get.return_value = MockHTTPResponse(headers={'Server': raw_version}) - check.check_id = 'test:123' - check.check(instance) + check.check_id = 'test:123' + check.check(instance) - datadog_agent.assert_metadata('test:123', version_metadata) - datadog_agent.assert_metadata_count(count) + datadog_agent.assert_metadata('test:123', version_metadata) + datadog_agent.assert_metadata_count(count) diff --git a/squid/tests/test_unit.py b/squid/tests/test_unit.py index 332e8c9123e2d..aba76270a8b36 100644 --- a/squid/tests/test_unit.py +++ b/squid/tests/test_unit.py @@ -6,6 +6,7 @@ import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.squid import SquidCheck from . import common @@ -55,40 +56,37 @@ def test_parse_instance(aggregator, check): check.parse_instance(instance) -def test_get_counters(check): +def test_get_counters(check, mock_http): """ Squid can return a trailing newline at the end of its metrics and it would be treated as a metric line: an error would be raised attempting to parse the line due to a missing = character. 
See https://github.com/DataDog/integrations-core/pull/1643 """ - with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: - with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): - g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") - check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) - check.get_counters('host', 'port', []) - # we assert `parse_counter` was called only once despite the raw text - # containing multiple `\n` chars - check.parse_counter.assert_called_once() - - -def test_host_without_protocol(check, instance): - with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: - with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): - g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") - check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) - check.check(instance) - assert g.call_args.args[0] == 'http://localhost:3128/squid-internal-mgr/counters' - - -def test_host_https(check, instance): + with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): + mock_http.get.return_value = MockHTTPResponse(content="client_http.requests=42\n\n") + check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) + check.get_counters('host', 'port', []) + # we assert `parse_counter` was called only once despite the raw text + # containing multiple `\n` chars + check.parse_counter.assert_called_once() + + +def test_host_without_protocol(check, instance, mock_http): + with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): + mock_http.get.return_value = MockHTTPResponse(content="client_http.requests=42\n\n") + check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) + check.check(instance) + assert mock_http.get.call_args.args[0] == 'http://localhost:3128/squid-internal-mgr/counters' + + +def test_host_https(check, instance, mock_http): instance['host'] = 'https://localhost' - with 
mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: - with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): - g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") - check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) - check.check(instance) - assert g.call_args.args[0] == 'https://localhost:3128/squid-internal-mgr/counters' + with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): + mock_http.get.return_value = MockHTTPResponse(content="client_http.requests=42\n\n") + check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) + check.check(instance) + assert mock_http.get.call_args.args[0] == 'https://localhost:3128/squid-internal-mgr/counters' @pytest.mark.parametrize( @@ -103,17 +101,4 @@ def test_legacy_username_password(instance, auth_config): instance.update(auth_config) check = SquidCheck(common.CHECK_NAME, {}, {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests.Session.get') as g: - with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): - check.get_counters('host', 'port', []) - - g.assert_called_with( - 'http://host:port/squid-internal-mgr/counters', - auth=('datadog_user', 'datadog_pass'), - cert=mock.ANY, - headers=mock.ANY, - proxies=mock.ANY, - timeout=mock.ANY, - verify=mock.ANY, - allow_redirects=mock.ANY, - ) + assert check.http.options['auth'] == ('datadog_user', 'datadog_pass') diff --git a/strimzi/tests/conftest.py b/strimzi/tests/conftest.py index 0499c622ed9bb..16ce3da9f2b10 100644 --- a/strimzi/tests/conftest.py +++ b/strimzi/tests/conftest.py @@ -8,8 +8,8 @@ import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse # noqa: F401 from datadog_checks.dev import run_command -from datadog_checks.dev.http import MockResponse from datadog_checks.dev.kind import kind_run from datadog_checks.dev.kube_port_forward import port_forward from datadog_checks.strimzi import StrimziCheck @@ -103,4 +103,4 @@ def 
mock_http_responses(url, **_params): pytest.fail(f"url `{url}` not registered") with open(os.path.join(HERE, 'fixtures', STRIMZI_VERSION, metrics_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/strimzi/tests/test_unit.py b/strimzi/tests/test_unit.py index 0c9475cefab1a..1cfb28a5478c1 100644 --- a/strimzi/tests/test_unit.py +++ b/strimzi/tests/test_unit.py @@ -54,9 +54,9 @@ def test_check_unique_operator( instance, metrics, tag, - mocker, + mock_http, ): - mocker.patch("requests.Session.get", wraps=mock_http_responses) + mock_http.get.side_effect = mock_http_responses dd_run_check(check(instance)) for expected_metric in metrics: @@ -75,8 +75,8 @@ def test_check_unique_operator( assert len(aggregator.service_check_names) == 1 -def test_check_all_operators(dd_run_check, aggregator, check, mocker): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check_all_operators(dd_run_check, aggregator, check, mock_http): + mock_http.get.side_effect = mock_http_responses dd_run_check( check( { diff --git a/teamcity/tests/test_teamcity.py b/teamcity/tests/test_teamcity.py index 65c23a4a12dab..2800bd02efcef 100644 --- a/teamcity/tests/test_teamcity.py +++ b/teamcity/tests/test_teamcity.py @@ -4,7 +4,6 @@ from copy import deepcopy import pytest -from mock import ANY, patch from datadog_checks.teamcity.constants import ( SERVICE_CHECK_BUILD_PROBLEMS, @@ -17,7 +16,6 @@ BUILD_STATS_METRICS, BUILD_TAGS, EXPECTED_SERVICE_CHECK_TEST_RESULTS, - LEGACY_REST_INSTANCE, USE_OPENMETRICS, ) @@ -53,36 +51,6 @@ def test_build_event(dd_run_check, aggregator, rest_instance): aggregator.assert_event(msg_title="", msg_text="", count=0) -@pytest.mark.parametrize( - 'extra_config, expected_http_kwargs', - [ - pytest.param({'ssl_validation': True}, {'verify': True}, id="legacy ssl config True"), - pytest.param({'ssl_validation': False}, {'verify': False}, id="legacy ssl config False"), - pytest.param({}, {'verify': 
True}, id="legacy ssl config unset"), - ], -) -def test_config(dd_run_check, extra_config, expected_http_kwargs): - instance = deepcopy(LEGACY_REST_INSTANCE) - instance.update(extra_config) - check = TeamCityRest('teamcity', {}, [instance]) - - with patch('datadog_checks.base.utils.http.requests.Session.get') as r: - dd_run_check(check) - - http_wargs = { - 'auth': ANY, - 'cert': ANY, - 'headers': ANY, - 'proxies': ANY, - 'timeout': ANY, - 'verify': ANY, - 'allow_redirects': ANY, - } - http_wargs.update(expected_http_kwargs) - - r.assert_called_with(ANY, **http_wargs) - - @pytest.mark.parametrize( 'build_config, expected_error', [ diff --git a/teamcity/tests/test_unit.py b/teamcity/tests/test_unit.py index 20fd19a482ac6..10414201ffcb2 100644 --- a/teamcity/tests/test_unit.py +++ b/teamcity/tests/test_unit.py @@ -2,10 +2,12 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from collections import OrderedDict +from copy import deepcopy import pytest from datadog_checks.teamcity.common import filter_build_configs, filter_items, filter_projects, normalize_server_url +from datadog_checks.teamcity.teamcity_rest import TeamCityRest from .common import ( CONFIG_ALL_BUILD_CONFIGS, @@ -18,6 +20,7 @@ CONFIG_ONLY_EXCLUDE_ONE_BUILD_CONFIG, CONFIG_ONLY_EXCLUDE_ONE_PROJECT, CONFIG_ONLY_INCLUDE_ONE_BUILD_CONFIG, + LEGACY_REST_INSTANCE, TEAMCITY_SERVER_VALUES, USE_OPENMETRICS, ) @@ -348,3 +351,20 @@ def test_filter_build_configs( filtered = filter_build_configs(check, build_configs_to_filter, 'ProjectID', {'ProjectID': filter_config}) assert filtered == expected_result + + +@pytest.mark.parametrize( + 'extra_config, expected_http_kwargs', + [ + pytest.param({'ssl_validation': True}, {'verify': True}, id="legacy ssl config True"), + pytest.param({'ssl_validation': False}, {'verify': False}, id="legacy ssl config False"), + pytest.param({}, {'verify': True}, id="legacy ssl config unset"), + ], +) +def test_config(extra_config, expected_http_kwargs): 
+ instance = deepcopy(LEGACY_REST_INSTANCE) + instance.update(extra_config) + check = TeamCityRest('teamcity', {}, [instance]) + + for key, value in expected_http_kwargs.items(): + assert check.http.options[key] == value diff --git a/tekton/tests/common.py b/tekton/tests/common.py index 0203124213c86..a1d36404d371a 100644 --- a/tekton/tests/common.py +++ b/tekton/tests/common.py @@ -3,8 +3,8 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import get_here -from datadog_checks.dev.http import MockResponse from datadog_checks.tekton import TektonCheck HERE = get_here() @@ -152,4 +152,4 @@ def mock_http_responses(url, **_params): raise Exception(f"url `{url}` not registered") with open(os.path.join(HERE, 'fixtures', metrics_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) diff --git a/tekton/tests/test_unit.py b/tekton/tests/test_unit.py index 0010a2eae466d..de85f1b0857fb 100644 --- a/tekton/tests/test_unit.py +++ b/tekton/tests/test_unit.py @@ -16,8 +16,8 @@ pytest.param('triggers_instance', TRIGGERS_METRICS, 'triggers_controller', id='triggers'), ], ) -def test_check(dd_run_check, aggregator, mocker, instance, metrics, request, namespace): - mocker.patch("requests.Session.get", wraps=mock_http_responses) +def test_check(dd_run_check, aggregator, mock_http, instance, metrics, request, namespace): + mock_http.get.side_effect = mock_http_responses dd_run_check(check(request.getfixturevalue(instance))) for expected_metric in metrics: @@ -30,10 +30,10 @@ def test_check(dd_run_check, aggregator, mocker, instance, metrics, request, nam assert len(aggregator.service_check_names) == 1 -def test_invalid_url(dd_run_check, aggregator, pipelines_instance, mocker): +def test_invalid_url(dd_run_check, aggregator, pipelines_instance, mock_http): pipelines_instance["pipelines_controller_endpoint"] = "http://unknowwn" - 
mocker.patch("requests.Session.get", wraps=mock_http_responses) + mock_http.get.side_effect = mock_http_responses with pytest.raises(Exception): dd_run_check(check(pipelines_instance)) diff --git a/temporal/tests/conftest.py b/temporal/tests/conftest.py index 2ed04281aa8c5..68117b017db27 100644 --- a/temporal/tests/conftest.py +++ b/temporal/tests/conftest.py @@ -5,10 +5,10 @@ import os import time from contextlib import contextmanager -from unittest import mock import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import EnvVars, TempDir, docker_run, get_docker_hostname, get_here, run_command from datadog_checks.dev._env import get_state, save_state from datadog_checks.dev.conditions import CheckEndpoints @@ -82,14 +82,9 @@ def check(instance): @pytest.fixture() -def mock_metrics(): +def mock_metrics(mock_http): f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'metrics.txt') with open(f_name, 'r') as f: text_data = f.read() - with mock.patch( - 'requests.Session.get', - return_value=mock.MagicMock( - status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} - ), - ): - yield + mock_http.get.return_value = MockHTTPResponse(content=text_data, headers={'Content-Type': 'text/plain'}) + yield diff --git a/torchserve/tests/conftest.py b/torchserve/tests/conftest.py index b02a64195e086..022191fb25b3a 100644 --- a/torchserve/tests/conftest.py +++ b/torchserve/tests/conftest.py @@ -7,10 +7,10 @@ import pytest +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import EnvVars, TempDir, docker_run, get_here from datadog_checks.dev._env import get_state, save_state from datadog_checks.dev.conditions import CheckEndpoints, WaitFor -from datadog_checks.dev.http import MockResponse from datadog_checks.torchserve import TorchserveCheck from .common import ( @@ -135,7 +135,7 @@ def _mock_http_responses(url, **_params): 
pytest.fail(f"url `{url}` not registered") with open(os.path.join(HERE, 'fixtures', metrics_file)) as f: - return MockResponse(content=f.read()) + return MockHTTPResponse(content=f.read()) return _mock_http_responses diff --git a/torchserve/tests/management/test_model_discovery.py b/torchserve/tests/management/test_model_discovery.py index ea17df181c441..cb02407f916e5 100644 --- a/torchserve/tests/management/test_model_discovery.py +++ b/torchserve/tests/management/test_model_discovery.py @@ -95,7 +95,7 @@ ), ], ) -def test_get_models(check, mocked_management_instance, expected_models, fixture_folder, status_codes): +def test_get_models(check, mocked_management_instance, expected_models, fixture_folder, status_codes, mock_http): # Build all the responses our mock will return responses = [] full_path = get_fixture_path(os.path.join("management", "pagination", fixture_folder)) @@ -109,21 +109,19 @@ def test_get_models(check, mocked_management_instance, expected_models, fixture_ mock_resp.raise_for_status.side_effect = HTTPError() if status_code != 200 else None responses.append(mock_resp) - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): - discovery = ModelDiscovery(check(mocked_management_instance), include=[".*"]) - req.get.side_effect = responses - assert [('.*', model['modelName'], model, None) for model in expected_models] == list(discovery.get_items()) - assert req.get.call_count == len(status_codes) + mock_http.get.side_effect = responses + discovery = ModelDiscovery(check(mocked_management_instance), include=[".*"]) + assert [('.*', model['modelName'], model, None) for model in expected_models] == list(discovery.get_items()) + assert mock_http.get.call_count == len(status_codes) - # Validate we used the right params - assert req.get.call_args_list[0].kwargs["params"] == {"limit": 100} + # Validate we used the right params + assert mock_http.get.call_args_list[0].kwargs["params"] == {"limit": 100} 
- for index, _ in enumerate(status_codes[1:], start=1): - # The nextPageToken from the call n comes from the answer n-1 - assert req.get.call_args_list[index].kwargs["params"] == { - "limit": 100, - "nextPageToken": responses[index - 1].json.return_value["nextPageToken"], - } + for index, _ in enumerate(status_codes[1:], start=1): + # The nextPageToken from the call n comes from the answer n-1 + assert mock_http.get.call_args_list[index].kwargs["params"] == { + "limit": 100, + "nextPageToken": responses[index - 1].json.return_value["nextPageToken"], + } - assert discovery.api_status == (AgentCheck.CRITICAL if status_codes[0] != 200 else AgentCheck.OK) + assert discovery.api_status == (AgentCheck.CRITICAL if status_codes[0] != 200 else AgentCheck.OK) diff --git a/torchserve/tests/management/test_unit.py b/torchserve/tests/management/test_unit.py index 15cd3b694faff..1e099ff64b3fe 100644 --- a/torchserve/tests/management/test_unit.py +++ b/torchserve/tests/management/test_unit.py @@ -4,7 +4,7 @@ import pytest from datadog_checks.base import AgentCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from ..conftest import mock_http_responses @@ -39,7 +39,7 @@ def custom_mock_http_responses(url, **_params): 'http://torchserve:8081/models/linear_regression_1_1/all', 'http://torchserve:8081/models/linear_regression_2_2/all', ): - return MockResponse(status_code=500) + return MockHTTPResponse(status_code=500) return mock_http_responses()(url) diff --git a/traefik_mesh/changelog.d/22676.changed b/traefik_mesh/changelog.d/22676.changed new file mode 100644 index 0000000000000..f39347eb80c40 --- /dev/null +++ b/traefik_mesh/changelog.d/22676.changed @@ -0,0 +1 @@ +Remove unused ``url`` parameter from ``get_version``. 
\ No newline at end of file diff --git a/traefik_mesh/datadog_checks/traefik_mesh/check.py b/traefik_mesh/datadog_checks/traefik_mesh/check.py index 0495315e963c1..ad1394c8a9aca 100644 --- a/traefik_mesh/datadog_checks/traefik_mesh/check.py +++ b/traefik_mesh/datadog_checks/traefik_mesh/check.py @@ -8,6 +8,9 @@ import requests from datadog_checks.base import AgentCheck, OpenMetricsBaseCheckV2 +from datadog_checks.base.utils.http_exceptions import HTTPConnectionError as _HTTPConnectionError +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError as _HTTPTimeoutError from datadog_checks.traefik_mesh.config_models import ConfigMixin from datadog_checks.traefik_mesh.metrics import METRIC_MAP, RENAME_LABELS @@ -112,10 +115,15 @@ def _get_json(self, url): resp = self.http.get(url) resp.raise_for_status() return resp.json() - except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e: + except ( + requests.exceptions.HTTPError, + requests.exceptions.ConnectionError, + HTTPStatusError, + _HTTPConnectionError, + ) as e: self.warning( "Couldn't connect to URL: %s with exception: %s. 
Please verify the address is reachable", url, e ) - except requests.exceptions.Timeout as e: + except (requests.exceptions.Timeout, _HTTPTimeoutError) as e: self.warning("Connection timeout when connecting to %s: %s", url, e) return None diff --git a/traefik_mesh/tests/test_unit.py b/traefik_mesh/tests/test_unit.py index 012efd1574bcb..6648b74bf30db 100644 --- a/traefik_mesh/tests/test_unit.py +++ b/traefik_mesh/tests/test_unit.py @@ -5,6 +5,9 @@ import pytest from datadog_checks.base.constants import ServiceCheck +from datadog_checks.base.utils.http_exceptions import HTTPConnectionError as _HTTPConnectionError +from datadog_checks.base.utils.http_exceptions import HTTPStatusError +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError as _HTTPTimeoutError from datadog_checks.dev.utils import assert_service_checks, get_metadata_metrics from datadog_checks.traefik_mesh import TraefikMeshCheck @@ -156,3 +159,21 @@ def test_submit_version(datadog_agent, dd_run_check, mock_http_response): } datadog_agent.assert_metadata('test:123', version_metadata) + + +def test_get_json_handles_http_status_error(): + check = TraefikMeshCheck('traefik_mesh', {}, [OM_MOCKED_INSTANCE]) + with mock.patch('requests.Session.get', side_effect=HTTPStatusError('404 Client Error')): + assert check._get_json('http://example.com/api') is None + + +def test_get_json_handles_http_connection_error(): + check = TraefikMeshCheck('traefik_mesh', {}, [OM_MOCKED_INSTANCE]) + with mock.patch('requests.Session.get', side_effect=_HTTPConnectionError('Connection refused')): + assert check._get_json('http://example.com/api') is None + + +def test_get_json_handles_http_timeout_error(): + check = TraefikMeshCheck('traefik_mesh', {}, [OM_MOCKED_INSTANCE]) + with mock.patch('requests.Session.get', side_effect=_HTTPTimeoutError('Read timed out')): + assert check._get_json('http://example.com/api') is None diff --git a/twistlock/tests/test_twistlock.py b/twistlock/tests/test_twistlock.py index 
c6bef536c290c..7b9442ab0e793 100644 --- a/twistlock/tests/test_twistlock.py +++ b/twistlock/tests/test_twistlock.py @@ -8,8 +8,8 @@ import pytest from datadog_checks.base import AgentCheck +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import get_here -from datadog_checks.dev.http import MockResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.twistlock import TwistlockCheck @@ -38,7 +38,7 @@ def mock_get_factory(fixture_group): def mock_get(session, url, *args, **kwargs): split_url = url.split('/') path = split_url[-1] - return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) return mock_get @@ -97,8 +97,8 @@ def mock_get(url, *args, **kwargs): path = split_url[-1] if path != 'images': - return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) - return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'empty_images.json')) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) + return MockHTTPResponse(file_path=os.path.join(HERE, 'fixtures', 'empty_images.json')) with mock.patch('requests.Session.get', side_effect=mock_get): check.check(instance) @@ -110,7 +110,7 @@ def test_err_response(aggregator, instance): with pytest.raises(Exception, match='^Error in response'): with mock.patch( - 'requests.Session.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True + 'requests.Session.get', return_value=MockHTTPResponse('{"err": "invalid credentials"}'), autospec=True ): check.check(instance) diff --git a/vault/changelog.d/22676.added b/vault/changelog.d/22676.added new file mode 100644 index 0000000000000..5bcdc39844d6b --- /dev/null +++ b/vault/changelog.d/22676.added @@ -0,0 +1 @@ +Reapply auth headers to check.http 
after scraper started sharing it \ No newline at end of file diff --git a/vault/datadog_checks/vault/check.py b/vault/datadog_checks/vault/check.py index 5726d1eb4bc35..2e83f811a4d2b 100644 --- a/vault/datadog_checks/vault/check.py +++ b/vault/datadog_checks/vault/check.py @@ -43,7 +43,7 @@ def __init__(self, name, init_config, instances): self.scraper_configs.clear() # https://www.vaultproject.io/api-docs#the-x-vault-request-header - self.http.options['headers']['X-Vault-Request'] = 'true' + self.http.set_header('X-Vault-Request', 'true') # Before scrapers are configured self.check_initializations.insert(-1, self.parse_config) @@ -238,8 +238,12 @@ def configure_scrapers(self): }, } } + if hasattr(self, '_http'): + del self._http + self.http.set_header('X-Vault-Request', 'true') if self.config.client_token: config['headers']['X-Vault-Token'] = self.config.client_token + self.http.set_header('X-Vault-Token', self.config.client_token) self.scraper_configs.clear() self.scraper_configs.append(config) diff --git a/vault/datadog_checks/vault/vault.py b/vault/datadog_checks/vault/vault.py index b216644712494..b3667bb1d3134 100644 --- a/vault/datadog_checks/vault/vault.py +++ b/vault/datadog_checks/vault/vault.py @@ -6,6 +6,7 @@ import requests from datadog_checks.base import OpenMetricsBaseCheck, is_affirmative +from datadog_checks.base.utils.http_exceptions import HTTPTimeoutError from .check import VaultCheckV2 from .common import API_METHODS, DEFAULT_API_VERSION, SYS_HEALTH_DEFAULT_CODES, SYS_LEADER_DEFAULT_CODES, Api, Leader @@ -257,7 +258,7 @@ def access_api(self, url, ignore_status_codes=None): msg = 'The Vault endpoint `{}` returned invalid json data: {}.'.format(url, e) self.service_check(self.SERVICE_CHECK_CONNECT, self.CRITICAL, message=msg, tags=self._tags) raise ApiUnreachable(msg) - except requests.exceptions.Timeout: + except (requests.exceptions.Timeout, HTTPTimeoutError): msg = 'Vault endpoint `{}` timed out after {} seconds'.format(url, 
self.http.options['timeout'][0]) self.service_check(self.SERVICE_CHECK_CONNECT, self.CRITICAL, message=msg, tags=self._tags) raise ApiUnreachable(msg) @@ -318,7 +319,7 @@ def renew_client_token(self): self.set_client_token(f.read().decode('utf-8')) def _set_header(self, http_wrapper, header, value): - http_wrapper.options['headers'][header] = value + http_wrapper.set_header(header, value) def get_scraper_config(self, instance): # This validation is called during `__init__` but we don't need it diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index cf90dddea9d2a..0fd5d4a72bc6b 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -8,7 +8,7 @@ import pytest import requests -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.vault import Vault from datadog_checks.vault.common import DEFAULT_API_VERSION from datadog_checks.vault.errors import ApiUnreachable @@ -78,11 +78,11 @@ def test_service_check_connect_ok_all_tags(self, aggregator, dd_run_check, globa def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} ) elif url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -135,7 +135,7 @@ def test_service_check_500_fail(self, aggregator, dd_run_check, global_tags): instance.update(INSTANCES['main']) c = Vault(Vault.CHECK_NAME, {}, [instance]) - with mock.patch('requests.Session.get', return_value=MockResponse(status_code=500)): + with mock.patch('requests.Session.get', return_value=MockHTTPResponse(status_code=500)): with pytest.raises( Exception, match=r'^The Vault endpoint `{}.+?` returned 
500$'.format(re.escape(instance['api_url'])) ): @@ -171,11 +171,11 @@ def test_service_check_unsealed_ok_all_tags(self, aggregator, dd_run_check, glob def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} ) elif url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -214,7 +214,7 @@ def test_service_check_unsealed_fail(self, aggregator, dd_run_check, use_openmet def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -255,11 +255,11 @@ def test_service_check_initialized_ok_all_tags(self, aggregator, dd_run_check, g def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} ) elif url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -298,7 +298,7 @@ def test_service_check_initialized_fail(self, aggregator, dd_run_check, use_open def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -329,11 +329,11 @@ def test_disable_legacy_cluster_tag(self, aggregator, dd_run_check, global_tags) 
def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} ) elif url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -370,7 +370,7 @@ def test_replication_dr_mode(self, aggregator, dd_run_check, use_openmetrics): def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -407,7 +407,7 @@ def test_replication_dr_mode_collect_secondary(self, aggregator, dd_run_check, u def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -453,7 +453,7 @@ def mock_requests_get(session, url, *args, **kwargs): else: replication_dr_mode = 'secondary' - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -506,7 +506,7 @@ def test_event_leader_change(self, aggregator, dd_run_check, cluster, use_openme def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={ 'ha_enabled': False, 'is_self': True, @@ -547,7 +547,7 @@ def test_leader_change_not_self(self, aggregator, dd_run_check, use_openmetrics) def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return 
MockHTTPResponse( json_data={ 'ha_enabled': False, 'is_self': False, @@ -573,7 +573,7 @@ def test_is_leader_metric_true(self, aggregator, dd_run_check, use_openmetrics): def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={ 'ha_enabled': False, 'is_self': True, @@ -599,7 +599,7 @@ def test_is_leader_metric_false(self, aggregator, dd_run_check, use_openmetrics) def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse( + return MockHTTPResponse( json_data={ 'ha_enabled': False, 'is_self': False, @@ -626,7 +626,7 @@ def test_sys_health_non_standard_status_codes(self, aggregator, dd_run_check, st def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': - return MockResponse( + return MockHTTPResponse( json_data={ 'cluster_id': '9e25ccdb-09ea-8bd8-0521-34cf3ef7a4cc', 'cluster_name': 'vault-cluster-f5f44063', @@ -660,7 +660,7 @@ def test_sys_leader_non_standard_status_codes(self, aggregator, dd_run_check, us def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': - return MockResponse(json_data={'errors': ["Vault is sealed"]}, status_code=503) + return MockHTTPResponse(json_data={'errors': ["Vault is sealed"]}, status_code=503) return requests_get(url, *args, **kwargs) with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): diff --git a/vllm/tests/test_unit.py b/vllm/tests/test_unit.py index f6aa9f5cca85f..e1f6dba11bc9d 100644 --- a/vllm/tests/test_unit.py +++ b/vllm/tests/test_unit.py @@ -7,7 +7,7 @@ import pytest from datadog_checks.base.constants import ServiceCheck -from datadog_checks.dev.http import MockResponse +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.vllm import 
vLLMCheck @@ -19,8 +19,8 @@ def test_check_vllm(dd_run_check, aggregator, datadog_agent, instance): check.check_id = "test:123" mock_responses = [ - MockResponse(file_path=get_fixture_path("vllm_metrics.txt")), - MockResponse(file_path=get_fixture_path("vllm_version.json")), + MockHTTPResponse(file_path=get_fixture_path("vllm_metrics.txt")), + MockHTTPResponse(file_path=get_fixture_path("vllm_version.json")), ] with mock.patch('requests.Session.get', side_effect=mock_responses): @@ -43,8 +43,8 @@ def test_check_vllm_w_ray_prefix(dd_run_check, aggregator, datadog_agent, ray_in check.check_id = "test:123" mock_responses = [ - MockResponse(file_path=get_fixture_path("ray_vllm_metrics.txt")), - MockResponse(file_path=get_fixture_path("vllm_version.json")), + MockHTTPResponse(file_path=get_fixture_path("ray_vllm_metrics.txt")), + MockHTTPResponse(file_path=get_fixture_path("vllm_version.json")), ] with mock.patch('requests.Session.get', side_effect=mock_responses): @@ -81,7 +81,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = vLLMCheck("vllm", {}, [instance]) - with pytest.raises(Exception, match='requests.exceptions.HTTPError'): + with pytest.raises(Exception, match='HTTPStatusError'): dd_run_check(check) aggregator.assert_all_metrics_covered() diff --git a/vsphere/tests/common.py b/vsphere/tests/common.py index 562b990e34dbb..54eb7b5de9c06 100644 --- a/vsphere/tests/common.py +++ b/vsphere/tests/common.py @@ -8,8 +8,8 @@ import mock from pyVmomi import vim, vmodl +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.base.utils.time import get_current_datetime -from datadog_checks.dev.http import MockResponse from datadog_checks.vsphere.api_rest import VSphereRestAPI HERE = os.path.abspath(os.path.dirname(__file__)) @@ -1091,7 +1091,7 @@ def __init__(self): def get(self, url, *args, **kwargs): if '/api/' in url: - return MockResponse({}, 404) + return 
MockHTTPResponse(json_data={}, status_code=404) parsed_url = urlparse(url) path_and_args = parsed_url.path + "?" + parsed_url.query if parsed_url.query else parsed_url.path path_parts = path_and_args.split('/') @@ -1101,7 +1101,7 @@ def get(self, url, *args, **kwargs): if re.match(r'.*/category/id:.*$', url): parts = url.split('_') num = parts[len(parts) - 1] - return MockResponse( + return MockHTTPResponse( json_data={ "value": { "name": "my_cat_name_{}".format(num), @@ -1116,7 +1116,7 @@ def get(self, url, *args, **kwargs): elif re.match(r'.*/tagging/tag/id:.*$', url): parts = url.split('_') num = parts[len(parts) - 1] - return MockResponse( + return MockHTTPResponse( json_data={ "value": { "category_id": "cat_id_{}".format(num), @@ -1132,7 +1132,7 @@ def get(self, url, *args, **kwargs): def post(self, url, *args, **kwargs): if '/api/' in url: - return MockResponse({}, 404) + return MockHTTPResponse(json_data={}, status_code=404) assert kwargs['headers']['Content-Type'] == 'application/json' parsed_url = urlparse(url) path_and_args = parsed_url.path + "?" 
+ parsed_url.query if parsed_url.query else parsed_url.path @@ -1141,12 +1141,12 @@ def post(self, url, *args, **kwargs): if subpath in self.exceptions: raise self.exceptions[subpath] if re.match(r'.*/session$', url): - return MockResponse( + return MockHTTPResponse( json_data={"value": "dummy-token"}, status_code=200, ) elif re.match(r'.*/tagging/tag-association\?~action=list-attached-tags-on-objects$', url): - return MockResponse( + return MockHTTPResponse( json_data={ "value": [ {"object_id": {"id": "vm1", "type": "VirtualMachine"}, "tag_ids": ["tag_id_1", "tag_id_2"]}, @@ -1173,7 +1173,7 @@ def get(self, url, *args, **kwargs): if re.match(r'.*/category/.*$', url): parts = url.split('_') num = parts[len(parts) - 1] - return MockResponse( + return MockHTTPResponse( json_data={ 'name': 'my_cat_name_{}'.format(num), 'description': 'VM category description', @@ -1186,7 +1186,7 @@ def get(self, url, *args, **kwargs): elif re.match(r'.*/tagging/tag/.*$', url): parts = url.split('_') num = parts[len(parts) - 1] - return MockResponse( + return MockHTTPResponse( json_data={ 'category_id': 'cat_id_{}'.format(num), 'name': 'my_tag_name_{}'.format(num), @@ -1207,12 +1207,12 @@ def post(self, url, *args, **kwargs): if subpath in self.exceptions: raise self.exceptions[subpath] if re.match(r'.*/session$', url): - return MockResponse( + return MockHTTPResponse( json_data="dummy-token", status_code=200, ) elif re.match(r'.*/tagging/tag-association\?action=list-attached-tags-on-objects$', url): - return MockResponse( + return MockHTTPResponse( json_data=[ {'tag_ids': ['tag_id_1', 'tag_id_2'], 'object_id': {'id': 'vm1', 'type': 'VirtualMachine'}}, {'tag_ids': ['tag_id_2'], 'object_id': {'id': 'ds1', 'type': 'Datastore'}}, diff --git a/yarn/tests/conftest.py b/yarn/tests/conftest.py index 1ca370584a43b..e98c47abdae54 100644 --- a/yarn/tests/conftest.py +++ b/yarn/tests/conftest.py @@ -7,12 +7,10 @@ from urllib.parse import urljoin import pytest -from mock import patch -from 
requests.exceptions import SSLError +from datadog_checks.base.utils.http_testing import MockHTTPResponse from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckEndpoints -from datadog_checks.dev.http import MockResponse from datadog_checks.yarn import YarnCheck from datadog_checks.yarn.yarn import YARN_APPS_PATH, YARN_CLUSTER_METRICS_PATH, YARN_NODES_PATH, YARN_SCHEDULER_PATH @@ -20,8 +18,6 @@ FIXTURE_DIR, HERE, INSTANCE_INTEGRATION, - TEST_PASSWORD, - TEST_USERNAME, YARN_APPS_URL, YARN_CLUSTER_METRICS_URL, YARN_NODES_URL, @@ -56,56 +52,23 @@ def instance(): @pytest.fixture -def mocked_request(): - with patch("requests.Session.get", new=requests_get_mock): - yield +def mocked_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield @pytest.fixture -def mocked_auth_request(): - def requests_auth_get(session, *args, **kwargs): - # Make sure we're passing in authentication - assert 'auth' in kwargs, 'Missing "auth" argument in requests.Session.get(...) call' - - # Make sure we've got the correct username and password - assert kwargs['auth'] == ( - TEST_USERNAME, - TEST_PASSWORD, - ), "Incorrect username or password in requests.Session.get" - - # Return mocked request.get(...) - return requests_get_mock(session, *args, **kwargs) - - with patch("requests.Session.get", new=requests_auth_get): - yield - - -@pytest.fixture -def mocked_bad_cert_request(): - """ - Mock request.Session.get to an endpoint with a badly configured ssl cert - """ - - def requests_bad_cert_get(session, *args, **kwargs): - # Make sure we're passing in the 'verify' argument - assert 'verify' in kwargs, 'Missing "verify" argument in requests.Session.get(...) 
call' - - if kwargs['verify']: - raise SSLError("certificate verification failed for {}".format(args[0])) - - # Return the actual response - return requests_get_mock(session, *args, **kwargs) - - with patch("requests.Session.get", new=requests_bad_cert_get): - yield - - -def requests_get_mock(session, *args, **kwargs): - if args[0] == YARN_CLUSTER_METRICS_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'cluster_metrics')) - elif args[0] == YARN_APPS_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'apps_metrics')) - elif args[0] == YARN_NODES_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'nodes_metrics')) - elif args[0] == YARN_SCHEDULER_URL: - return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'scheduler_metrics')) +def mocked_auth_request(mock_http): + mock_http.get.side_effect = requests_get_mock + yield + + +def requests_get_mock(url, *args, **kwargs): + if url == YARN_CLUSTER_METRICS_URL: + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'cluster_metrics')) + elif url == YARN_APPS_URL: + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'apps_metrics')) + elif url == YARN_NODES_URL: + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'nodes_metrics')) + elif url == YARN_SCHEDULER_URL: + return MockHTTPResponse(file_path=os.path.join(FIXTURE_DIR, 'scheduler_metrics')) diff --git a/yarn/tests/test_yarn.py b/yarn/tests/test_yarn.py index 0ff1a36ccc18c..c98c76e3bc055 100644 --- a/yarn/tests/test_yarn.py +++ b/yarn/tests/test_yarn.py @@ -5,7 +5,6 @@ import re import pytest -from requests.exceptions import SSLError from datadog_checks.yarn import YarnCheck from datadog_checks.yarn.yarn import ( @@ -267,7 +266,12 @@ def test_auth(aggregator, mocked_auth_request): ) -def test_ssl_verification(aggregator, mocked_bad_cert_request): +def test_ssl_verification(aggregator, mock_http): + from requests.exceptions import SSLError + + from .conftest import requests_get_mock + + 
mock_http.get.side_effect = SSLError("certificate verification failed") instance = YARN_SSL_VERIFY_TRUE_CONFIG['instances'][0] # Instantiate YarnCheck @@ -288,6 +292,7 @@ def test_ssl_verification(aggregator, mocked_bad_cert_request): raise AssertionError('Should have thrown an SSLError due to a badly configured certificate') # Run the check on the same configuration, but with verify=False. We shouldn't get an exception. + mock_http.get.side_effect = requests_get_mock instance = YARN_SSL_VERIFY_FALSE_CONFIG['instances'][0] yarn = YarnCheck('yarn', {}, [instance]) yarn.check(instance)