#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2024
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""Here we run tests directly with HTTPXRequest because that's easier than providing dummy
implementations for BaseRequest and we want to test HTTPXRequest anyway."""
import asyncio
import json
import logging
from collections import defaultdict
from collections.abc import Coroutine
from dataclasses import dataclass
from http import HTTPStatus
from typing import Any, Callable

import httpx
import pytest
from httpx import AsyncHTTPTransport

from telegram import InputFile
from telegram._utils.defaultvalue import DEFAULT_NONE
from telegram._utils.strings import TextEncoding
from telegram.error import (
    BadRequest,
    ChatMigrated,
    Conflict,
    Forbidden,
    InvalidToken,
    NetworkError,
    RetryAfter,
    TelegramError,
    TimedOut,
)
from telegram.request import BaseRequest, RequestData
from telegram.request._httpxrequest import HTTPXRequest
from telegram.request._requestparameter import RequestParameter
from telegram.warnings import PTBDeprecationWarning
from tests.auxil.envvars import TEST_WITH_OPT_DEPS
from tests.auxil.files import data_file
from tests.auxil.networking import NonchalantHttpxRequest
from tests.auxil.slots import mro_slots

# We only need the mixed_rqs fixture, but it uses the others, so pytest needs us to import them
# as well
from .test_requestdata import (  # noqa: F401
    file_params,
    input_media_photo,
    input_media_video,
    inputfiles,
    mixed_params,
    mixed_rqs,
    simple_params,
)


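# A tiny stand-in for `BaseRequest.do_request`: it performs no network I/O and simply resolves
# to a canned ``(return_code, response)`` pair. The tests below plug it in via
# ``monkeypatch.setattr(httpx_request, "do_request", mocker_factory(response=server_response))``.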
def mocker_factory(
    response: bytes, return_code: int = HTTPStatus.OK
) -> Callable[[tuple[Any]], Coroutine[Any, Any, tuple[int, bytes]]]:
    async def make_assertion(*args, **kwargs):
        return return_code, response

    return make_assertion


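# The ``async with`` block below ensures that ``initialize()`` and ``shutdown()`` are called
# around every test that uses this fixture.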
@pytest.fixture
async def httpx_request():
    async with NonchalantHttpxRequest() as rq:
        yield rq


@pytest.mark.skipif(
    TEST_WITH_OPT_DEPS, reason="Only relevant if the optional dependency is not installed"
)
class TestNoSocksHTTP2WithoutRequest:
    async def test_init(self, offline_bot):
        with pytest.raises(RuntimeError, match=r"python-telegram-bot\[socks\]"):
            HTTPXRequest(proxy="socks5://foo")
        with pytest.raises(RuntimeError, match=r"python-telegram-bot\[http2\]"):
            HTTPXRequest(http_version="2")


@pytest.mark.skipif(not TEST_WITH_OPT_DEPS, reason="Optional dependencies not installed")
class TestHTTP2WithRequest:
    @pytest.mark.parametrize("http_version", ["2", "2.0"])
    async def test_http_2_response(self, http_version):
        httpx_request = HTTPXRequest(http_version=http_version)
        async with httpx_request:
            resp = await httpx_request._client.request(
                url="https://python-telegram-bot.org",
                method="GET",
                headers={"User-Agent": httpx_request.USER_AGENT},
            )
            assert resp.http_version == "HTTP/2"


# We use `not TEST_WITH_OPT_DEPS` as the skip condition because testing with the optional
# dependencies installed is the default, i.e. this will run by default for an end-user who
# runs pytest.
@pytest.mark.skipif(not TEST_WITH_OPT_DEPS, reason="No need to run this twice")
class TestRequestWithoutRequest:
    test_flag = None

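    # `test_flag` is written by the monkeypatched callables in the individual tests; the autouse
    # fixture below resets it so that state cannot leak from one test into the next.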
    @pytest.fixture(autouse=True)
    def _reset(self):
        self.test_flag = None

    async def test_init_import_errors(self, monkeypatch):
        """Makes sure that import errors are forwarded - related to TestNoSocks above"""

        def __init__(self, *args, **kwargs):
            raise ImportError("Other Error Message")

        monkeypatch.setattr(httpx.AsyncClient, "__init__", __init__)

        # Make sure that other exceptions are forwarded
        with pytest.raises(ImportError, match=r"Other Error Message"):
            HTTPXRequest(proxy="socks5://foo")

    def test_slot_behaviour(self):
        inst = HTTPXRequest()
        for attr in inst.__slots__:
            at = f"_{inst.__class__.__name__}{attr}" if attr.startswith("__") else attr
            assert getattr(inst, at, "err") != "err", f"got extra slot '{at}'"
        assert len(mro_slots(inst)) == len(set(mro_slots(inst))), "duplicate slot"

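    # The assertions below check that entries in `httpx_kwargs` take precedence over the values
    # that HTTPXRequest derives from its own arguments (timeouts, pool size, HTTP version).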
    def test_httpx_kwargs(self, monkeypatch):
        self.test_flag = {}

        orig_init = httpx.AsyncClient.__init__

        class Client(httpx.AsyncClient):
            def __init__(*args, **kwargs):
                orig_init(*args, **kwargs)
                self.test_flag["args"] = args
                self.test_flag["kwargs"] = kwargs

        monkeypatch.setattr(httpx, "AsyncClient", Client)

        HTTPXRequest(
            connect_timeout=1,
            connection_pool_size=42,
            http_version="2",
            httpx_kwargs={
                "timeout": httpx.Timeout(7),
                "limits": httpx.Limits(max_connections=7),
                "http1": True,
                "verify": False,
            },
        )
        kwargs = self.test_flag["kwargs"]

        assert kwargs["timeout"].connect == 7
        assert kwargs["limits"].max_connections == 7
        assert kwargs["http1"] is True
        assert kwargs["verify"] is False

    async def test_context_manager(self, monkeypatch):
        async def initialize():
            self.test_flag = ["initialize"]

        async def shutdown():
            self.test_flag.append("stop")

        httpx_request = NonchalantHttpxRequest()

        monkeypatch.setattr(httpx_request, "initialize", initialize)
        monkeypatch.setattr(httpx_request, "shutdown", shutdown)

        async with httpx_request:
            pass

        assert self.test_flag == ["initialize", "stop"]

    async def test_context_manager_exception_on_init(self, monkeypatch):
        async def initialize():
            raise RuntimeError("initialize")

        async def shutdown():
            self.test_flag = "stop"

        httpx_request = NonchalantHttpxRequest()

        monkeypatch.setattr(httpx_request, "initialize", initialize)
        monkeypatch.setattr(httpx_request, "shutdown", shutdown)

        with pytest.raises(RuntimeError, match="initialize"):
            async with httpx_request:
                pass

        assert self.test_flag == "stop"

    async def test_replaced_unprintable_char(self, monkeypatch, httpx_request):
        """Clients can send arbitrary bytes in callback data. Make sure that we just replace
        those
        """
        server_response = b'{"result": "test_string\x80"}'

        monkeypatch.setattr(httpx_request, "do_request", mocker_factory(response=server_response))

        assert await httpx_request.post(None, None, None) == "test_string\ufffd"
        # Explicitly call `parse_json_payload` here as well so that this public method is covered
        # not only implicitly.
        assert httpx_request.parse_json_payload(server_response) == {"result": "test_string\ufffd"}

    async def test_illegal_json_response(self, monkeypatch, httpx_request: HTTPXRequest, caplog):
        # for proper JSON it should be `"result":` instead of `result:`
        server_response = b'{result: "test_string"}'

        monkeypatch.setattr(httpx_request, "do_request", mocker_factory(response=server_response))

        with (
            pytest.raises(TelegramError, match="Invalid server response"),
            caplog.at_level(logging.ERROR),
        ):
            await httpx_request.post(None, None, None)

        assert len(caplog.records) == 1
        record = caplog.records[0]
        assert record.name == "telegram.request.BaseRequest"
        assert record.getMessage().endswith(f'invalid JSON data: "{server_response.decode()}"')

    async def test_chat_migrated(self, monkeypatch, httpx_request: HTTPXRequest):
        server_response = b'{"ok": "False", "parameters": {"migrate_to_chat_id": 123}}'

        monkeypatch.setattr(
            httpx_request,
            "do_request",
            mocker_factory(response=server_response, return_code=HTTPStatus.BAD_REQUEST),
        )

        with pytest.raises(ChatMigrated, match="New chat id: 123") as exc_info:
            await httpx_request.post(None, None, None)

        assert exc_info.value.new_chat_id == 123

    async def test_retry_after(self, monkeypatch, httpx_request: HTTPXRequest):
        server_response = b'{"ok": "False", "parameters": {"retry_after": 42}}'

        monkeypatch.setattr(
            httpx_request,
            "do_request",
            mocker_factory(response=server_response, return_code=HTTPStatus.BAD_REQUEST),
        )

        with pytest.raises(RetryAfter, match="Retry in 42") as exc_info:
            await httpx_request.post(None, None, None)

        assert exc_info.value.retry_after == 42

    async def test_unknown_request_params(self, monkeypatch, httpx_request: HTTPXRequest):
        server_response = b'{"ok": "False", "parameters": {"unknown": "42"}}'

        monkeypatch.setattr(
            httpx_request,
            "do_request",
            mocker_factory(response=server_response, return_code=HTTPStatus.BAD_REQUEST),
        )

        with pytest.raises(
            BadRequest,
            match="{'unknown': '42'}",
        ):
            await httpx_request.post(None, None, None)

    @pytest.mark.parametrize("description", [True, False])
    async def test_error_description(self, monkeypatch, httpx_request: HTTPXRequest, description):
        response_data = {"ok": "False"}
        if description:
            match = "ErrorDescription"
            response_data["description"] = match
        else:
            match = "Unknown HTTPError"

        server_response = json.dumps(response_data).encode(TextEncoding.UTF_8)

        monkeypatch.setattr(
            httpx_request,
            "do_request",
            mocker_factory(response=server_response, return_code=-1),
        )

        with pytest.raises(NetworkError, match=match):
            await httpx_request.post(None, None, None)

        # Special casing for bad gateway
        if not description:
            monkeypatch.setattr(
                httpx_request,
                "do_request",
                mocker_factory(response=server_response, return_code=HTTPStatus.BAD_GATEWAY),
            )

            with pytest.raises(NetworkError, match="Bad Gateway"):
                await httpx_request.post(None, None, None)

    @pytest.mark.parametrize(
        ("code", "exception_class"),
        [
            (HTTPStatus.FORBIDDEN, Forbidden),
            (HTTPStatus.NOT_FOUND, InvalidToken),
            (HTTPStatus.UNAUTHORIZED, InvalidToken),
            (HTTPStatus.BAD_REQUEST, BadRequest),
            (HTTPStatus.CONFLICT, Conflict),
            (HTTPStatus.BAD_GATEWAY, NetworkError),
            (-1, NetworkError),
        ],
    )
    async def test_special_errors(
        self, monkeypatch, httpx_request: HTTPXRequest, code, exception_class
    ):
        server_response = b'{"ok": "False", "description": "Test Message"}'

        monkeypatch.setattr(
            httpx_request,
            "do_request",
            mocker_factory(response=server_response, return_code=code),
        )

        with pytest.raises(exception_class, match="Test Message"):
            await httpx_request.post("", None, None)

    @pytest.mark.parametrize(
        ("exception", "catch_class", "match"),
        [
            (TelegramError("TelegramError"), TelegramError, "TelegramError"),
            (
                RuntimeError("CustomError"),
                NetworkError,
                r"HTTP implementation: RuntimeError\('CustomError'\)",
            ),
        ],
    )
    async def test_exceptions_in_do_request(
        self, monkeypatch, httpx_request: HTTPXRequest, exception, catch_class, match
    ):
        async def do_request(*args, **kwargs):
            raise exception

        monkeypatch.setattr(
            httpx_request,
            "do_request",
            do_request,
        )

        with pytest.raises(catch_class, match=match) as exc_info:
            await httpx_request.post(None, None, None)

        if catch_class is NetworkError:
            assert exc_info.value.__cause__ is exception

    async def test_retrieve(self, monkeypatch, httpx_request):
        """Here we just test that retrieve gives us the raw bytes instead of trying to parse them
        as json
        """
        server_response = b'{"result": "test_string\x80"}'

        monkeypatch.setattr(httpx_request, "do_request", mocker_factory(response=server_response))

        assert await httpx_request.retrieve(None, None) == server_response

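    # `DEFAULT_NONE` is the sentinel that `post()` forwards when the caller passes no explicit
    # timeout; `do_request` implementations may then substitute their own defaults.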
    async def test_timeout_propagation_to_do_request(self, monkeypatch, httpx_request):
        async def make_assertion(*args, **kwargs):
            self.test_flag = (
                kwargs.get("read_timeout"),
                kwargs.get("connect_timeout"),
                kwargs.get("write_timeout"),
                kwargs.get("pool_timeout"),
            )
            return HTTPStatus.OK, b'{"ok": "True", "result": {}}'

        monkeypatch.setattr(httpx_request, "do_request", make_assertion)

        await httpx_request.post("url", None)
        assert self.test_flag == (DEFAULT_NONE, DEFAULT_NONE, DEFAULT_NONE, DEFAULT_NONE)

        await httpx_request.post(
            "url", None, read_timeout=1, connect_timeout=2, write_timeout=3, pool_timeout=4
        )
        assert self.test_flag == (1, 2, 3, 4)

    def test_read_timeout_not_implemented(self):
        class SimpleRequest(BaseRequest):
            async def do_request(self, *args, **kwargs):
                raise httpx.ReadTimeout("read timeout")

            async def initialize(self) -> None:
                pass

            async def shutdown(self) -> None:
                pass

        with pytest.raises(NotImplementedError):
            SimpleRequest().read_timeout

    @pytest.mark.parametrize("media", [True, False])
    async def test_timeout_propagation_write_timeout(
        self, monkeypatch, media, input_media_photo, recwarn  # noqa: F811
    ):
        class CustomRequest(BaseRequest):
            async def initialize(self_) -> None:
                pass

            async def shutdown(self_) -> None:
                pass

            async def do_request(self_, *args, **kwargs) -> tuple[int, bytes]:
                self.test_flag = (
                    kwargs.get("read_timeout"),
                    kwargs.get("connect_timeout"),
                    kwargs.get("write_timeout"),
                    kwargs.get("pool_timeout"),
                )
                return HTTPStatus.OK, b'{"ok": "True", "result": {}}'

        custom_request = CustomRequest()
        data = {"string": "string", "int": 1, "float": 1.0}
        if media:
            data["media"] = input_media_photo
        request_data = RequestData(
            parameters=[RequestParameter.from_input(key, value) for key, value in data.items()],
        )

        # First make sure that custom timeouts are always respected
        await custom_request.post(
            "url", request_data, read_timeout=1, connect_timeout=2, write_timeout=3, pool_timeout=4
        )
        assert self.test_flag == (1, 2, 3, 4)

        # Now also ensure that the default timeout for media requests is 20 seconds
        await custom_request.post("url", request_data)
        assert self.test_flag == (
            DEFAULT_NONE,
            DEFAULT_NONE,
            20 if media else DEFAULT_NONE,
            DEFAULT_NONE,
        )

        print("warnings")
        for entry in recwarn:
            print(entry.message)
        if media:
            assert len(recwarn) == 1
            assert "will default to `BaseRequest.DEFAULT_NONE` instead of 20" in str(
                recwarn[0].message
            )
            assert recwarn[0].category is PTBDeprecationWarning
            assert recwarn[0].filename == __file__
        else:
            assert len(recwarn) == 0


@pytest.mark.skipif(not TEST_WITH_OPT_DEPS, reason="No need to run this twice")
class TestHTTPXRequestWithoutRequest:
    test_flag = None

    @pytest.fixture(autouse=True)
    def _reset(self):
        self.test_flag = None

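    # Replacing `httpx.AsyncClient` with a plain dataclass records the constructor arguments, so
    # the test can inspect exactly what HTTPXRequest passes to the client.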
    # We parametrize this to make sure that the legacy `proxy_url` argument is still supported
    @pytest.mark.parametrize("proxy_argument", ["proxy", "proxy_url"])
    def test_init(self, monkeypatch, proxy_argument):
        @dataclass
        class Client:
            timeout: object
            proxy: object
            limits: object
            http1: object
            http2: object
            transport: object = None

        monkeypatch.setattr(httpx, "AsyncClient", Client)

        request = HTTPXRequest()
        assert request._client.timeout == httpx.Timeout(connect=5.0, read=5.0, write=5.0, pool=1.0)
        assert request._client.proxy is None
        assert request._client.limits == httpx.Limits(
            max_connections=1, max_keepalive_connections=1
        )
        assert request._client.http1 is True
        assert not request._client.http2

        kwargs = {
            "connection_pool_size": 42,
            proxy_argument: "proxy",
            "connect_timeout": 43,
            "read_timeout": 44,
            "write_timeout": 45,
            "pool_timeout": 46,
        }
        request = HTTPXRequest(**kwargs)
        assert request._client.proxy == "proxy"
        assert request._client.limits == httpx.Limits(
            max_connections=42, max_keepalive_connections=42
        )
        assert request._client.timeout == httpx.Timeout(connect=43, read=44, write=45, pool=46)

    def test_proxy_mutually_exclusive(self):
        with pytest.raises(ValueError, match="mutually exclusive"):
            HTTPXRequest(proxy="proxy", proxy_url="proxy_url")

    def test_proxy_url_deprecation_warning(self, recwarn):
        HTTPXRequest(proxy_url="http://127.0.0.1:3128")
        assert len(recwarn) == 1
        assert recwarn[0].category is PTBDeprecationWarning
        assert "`proxy_url` is deprecated" in str(recwarn[0].message)
        assert recwarn[0].filename == __file__, "incorrect stacklevel"

    async def test_multiple_inits_and_shutdowns(self, monkeypatch):
        self.test_flag = defaultdict(int)

        orig_init = httpx.AsyncClient.__init__
        orig_aclose = httpx.AsyncClient.aclose

        class Client(httpx.AsyncClient):
            def __init__(*args, **kwargs):
                orig_init(*args, **kwargs)
                self.test_flag["init"] += 1

            async def aclose(*args, **kwargs):
                await orig_aclose(*args, **kwargs)
                self.test_flag["shutdown"] += 1

        monkeypatch.setattr(httpx, "AsyncClient", Client)

        # Create a new one instead of using the fixture so that the mocking can work
        httpx_request = HTTPXRequest()

        await httpx_request.initialize()
        await httpx_request.initialize()
        await httpx_request.initialize()
        await httpx_request.shutdown()
        await httpx_request.shutdown()
        await httpx_request.shutdown()

        assert self.test_flag["init"] == 1
        assert self.test_flag["shutdown"] == 1

    async def test_http_version_error(self):
        with pytest.raises(ValueError, match="`http_version` must be either"):
            HTTPXRequest(http_version="1.0")

    async def test_do_request_after_shutdown(self, httpx_request):
        await httpx_request.shutdown()
        with pytest.raises(RuntimeError, match="not initialized"):
            await httpx_request.do_request(url="url", method="GET")

    async def test_context_manager(self, monkeypatch):
        async def initialize():
            self.test_flag = ["initialize"]

        async def aclose(*args):
            self.test_flag.append("stop")

        httpx_request = NonchalantHttpxRequest()

        monkeypatch.setattr(httpx_request, "initialize", initialize)
        monkeypatch.setattr(httpx.AsyncClient, "aclose", aclose)

        async with httpx_request:
            pass

        assert self.test_flag == ["initialize", "stop"]

    async def test_context_manager_exception_on_init(self, monkeypatch):
        async def initialize():
            raise RuntimeError("initialize")

        async def aclose(*args):
            self.test_flag = "stop"

        httpx_request = NonchalantHttpxRequest()

        monkeypatch.setattr(httpx_request, "initialize", initialize)
        monkeypatch.setattr(httpx.AsyncClient, "aclose", aclose)

        with pytest.raises(RuntimeError, match="initialize"):
            async with httpx_request:
                pass

        assert self.test_flag == "stop"

    async def test_do_request_default_timeouts(self, monkeypatch):
        default_timeouts = httpx.Timeout(connect=42, read=43, write=44, pool=45)

        async def make_assertion(_, **kwargs):
            self.test_flag = kwargs.get("timeout") == default_timeouts
            return httpx.Response(HTTPStatus.OK)

        async with HTTPXRequest(
            connect_timeout=default_timeouts.connect,
            read_timeout=default_timeouts.read,
            write_timeout=default_timeouts.write,
            pool_timeout=default_timeouts.pool,
        ) as httpx_request:
            monkeypatch.setattr(httpx.AsyncClient, "request", make_assertion)
            await httpx_request.do_request(method="GET", url="URL")

        assert self.test_flag

    async def test_do_request_manual_timeouts(self, monkeypatch, httpx_request):
        default_timeouts = httpx.Timeout(connect=42, read=43, write=44, pool=45)
        manual_timeouts = httpx.Timeout(connect=52, read=53, write=54, pool=55)

        async def make_assertion(_, **kwargs):
            self.test_flag = kwargs.get("timeout") == manual_timeouts
            return httpx.Response(HTTPStatus.OK)

        async with HTTPXRequest(
            connect_timeout=default_timeouts.connect,
            read_timeout=default_timeouts.read,
            write_timeout=default_timeouts.write,
            pool_timeout=default_timeouts.pool,
        ) as httpx_request_ctx:
            monkeypatch.setattr(httpx.AsyncClient, "request", make_assertion)
            await httpx_request_ctx.do_request(
                method="GET",
                url="URL",
                connect_timeout=manual_timeouts.connect,
                read_timeout=manual_timeouts.read,
                write_timeout=manual_timeouts.write,
                pool_timeout=manual_timeouts.pool,
            )

        assert self.test_flag

    async def test_do_request_params_no_data(self, monkeypatch, httpx_request):
        async def make_assertion(self, **kwargs):
            method_assertion = kwargs.get("method") == "method"
            url_assertion = kwargs.get("url") == "url"
            files_assertion = kwargs.get("files") is None
            data_assertion = kwargs.get("data") is None
            if method_assertion and url_assertion and files_assertion and data_assertion:
                return httpx.Response(HTTPStatus.OK)
            return httpx.Response(HTTPStatus.BAD_REQUEST)

        monkeypatch.setattr(httpx.AsyncClient, "request", make_assertion)
        code, _ = await httpx_request.do_request(method="method", url="url")
        assert code == HTTPStatus.OK

    async def test_do_request_params_with_data(
        self, monkeypatch, httpx_request, mixed_rqs  # noqa: F811
    ):
        async def make_assertion(self, **kwargs):
            method_assertion = kwargs.get("method") == "method"
            url_assertion = kwargs.get("url") == "url"
            files_assertion = kwargs.get("files") == mixed_rqs.multipart_data
            data_assertion = kwargs.get("data") == mixed_rqs.json_parameters
            if method_assertion and url_assertion and files_assertion and data_assertion:
                return httpx.Response(HTTPStatus.OK)
            return httpx.Response(HTTPStatus.BAD_REQUEST)

        monkeypatch.setattr(httpx.AsyncClient, "request", make_assertion)
        code, _ = await httpx_request.do_request(
            method="method",
            url="url",
            request_data=mixed_rqs,
        )
        assert code == HTTPStatus.OK

    async def test_do_request_return_value(self, monkeypatch, httpx_request):
        async def make_assertion(self, method, url, headers, timeout, files, data):
            return httpx.Response(123, content=b"content")

        monkeypatch.setattr(httpx.AsyncClient, "request", make_assertion)
        code, content = await httpx_request.do_request(
            "method",
            "url",
        )
        assert code == 123
        assert content == b"content"

    @pytest.mark.parametrize(
        ("raised_exception", "expected_class", "expected_message"),
        [
            (httpx.TimeoutException("timeout"), TimedOut, "Timed out"),
            (httpx.ReadError("read_error"), NetworkError, "httpx.ReadError: read_error"),
        ],
    )
    async def test_do_request_exceptions(
        self, monkeypatch, httpx_request, raised_exception, expected_class, expected_message
    ):
        async def make_assertion(self, method, url, headers, timeout, files, data):
            raise raised_exception

        monkeypatch.setattr(httpx.AsyncClient, "request", make_assertion)

        with pytest.raises(expected_class, match=expected_message) as exc_info:
            await httpx_request.do_request(
                "method",
                "url",
            )

        assert exc_info.value.__cause__ is raised_exception

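    # The mocked `request` below succeeds on the first call and raises `httpx.PoolTimeout` on the
    # second, concurrent one; HTTPXRequest is expected to surface that as a `TimedOut` error.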
    async def test_do_request_pool_timeout(self, monkeypatch):
        pool_timeout = httpx.PoolTimeout("pool timeout")

        async def request(_, **kwargs):
            if self.test_flag is None:
                self.test_flag = True
            else:
                raise pool_timeout
            return httpx.Response(HTTPStatus.OK)

        monkeypatch.setattr(httpx.AsyncClient, "request", request)

        async with HTTPXRequest(pool_timeout=0.02) as httpx_request:
            with pytest.raises(TimedOut, match="Pool timeout") as exc_info:
                await asyncio.gather(
                    httpx_request.do_request(method="GET", url="URL"),
                    httpx_request.do_request(method="GET", url="URL"),
                )

        assert exc_info.value.__cause__ is pool_timeout

    @pytest.mark.parametrize("media", [True, False])
    async def test_do_request_write_timeout(
        self, monkeypatch, media, httpx_request, input_media_photo, recwarn  # noqa: F811
    ):
        async def request(_, **kwargs):
            self.test_flag = kwargs.get("timeout")
            return httpx.Response(HTTPStatus.OK, content=b'{"ok": "True", "result": {}}')

        monkeypatch.setattr(httpx.AsyncClient, "request", request)

        data = {"string": "string", "int": 1, "float": 1.0}
        if media:
            data["media"] = input_media_photo
        request_data = RequestData(
            parameters=[RequestParameter.from_input(key, value) for key, value in data.items()],
        )

        # First make sure that custom timeouts are always respected
        await httpx_request.post(
            "url", request_data, read_timeout=1, connect_timeout=2, write_timeout=3, pool_timeout=4
        )
        assert self.test_flag == httpx.Timeout(read=1, connect=2, write=3, pool=4)

        # Now also ensure that the default timeout for media requests is 20 seconds
        await httpx_request.post("url", request_data)
        assert self.test_flag == httpx.Timeout(read=5, connect=5, write=20 if media else 5, pool=1)

        # Just for double-checking, since warnings are issued for implementations of BaseRequest
        # other than HTTPXRequest
        assert len(recwarn) == 0

    @pytest.mark.parametrize("init", [True, False])
    async def test_setting_media_write_timeout(
        self, monkeypatch, init, input_media_photo, recwarn  # noqa: F811
    ):
        httpx_request = HTTPXRequest(media_write_timeout=42) if init else HTTPXRequest()

        async def request(_, **kwargs):
            self.test_flag = kwargs["timeout"].write
            return httpx.Response(HTTPStatus.OK, content=b'{"ok": "True", "result": {}}')

        monkeypatch.setattr(httpx.AsyncClient, "request", request)

        data = {"string": "string", "int": 1, "float": 1.0, "media": input_media_photo}
        request_data = RequestData(
            parameters=[RequestParameter.from_input(key, value) for key, value in data.items()],
        )

        # First make sure that custom timeouts are always respected
        await httpx_request.post(
            "url",
            request_data,
            write_timeout=43,
        )
        assert self.test_flag == 43

        # Now also ensure that the init value is respected
        await httpx_request.post("url", request_data)
        assert self.test_flag == (42 if init else 20)

        # Just for double-checking, since warnings are issued for implementations of BaseRequest
        # other than HTTPXRequest
        assert len(recwarn) == 0

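    # `socket_options` is forwarded to the underlying `AsyncHTTPTransport`, which is why the
    # transport's `__init__` is intercepted here instead of `httpx.AsyncClient`.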
    async def test_socket_opts(self, monkeypatch):
        transport_kwargs = {}
        transport_init = AsyncHTTPTransport.__init__

        def init_transport(*args, **kwargs):
            nonlocal transport_kwargs
            transport_kwargs = kwargs.copy()
            transport_init(*args, **kwargs)

        monkeypatch.setattr(AsyncHTTPTransport, "__init__", init_transport)

        HTTPXRequest()
        assert "socket_options" not in transport_kwargs

        transport_kwargs = {}
        HTTPXRequest(socket_options=((1, 2, 3),))
        assert transport_kwargs["socket_options"] == ((1, 2, 3),)

    @pytest.mark.parametrize("read_timeout", [None, 1, 2, 3])
    async def test_read_timeout_property(self, read_timeout):
        assert HTTPXRequest(read_timeout=read_timeout).read_timeout == read_timeout


@pytest.mark.skipif(not TEST_WITH_OPT_DEPS, reason="No need to run this twice")
class TestHTTPXRequestWithRequest:
    async def test_multiple_init_cycles(self):
        # nothing really to assert - this should just not fail
        httpx_request = HTTPXRequest()
        async with httpx_request:
            await httpx_request.do_request(url="https://python-telegram-bot.org", method="GET")
        async with httpx_request:
            await httpx_request.do_request(url="https://python-telegram-bot.org", method="GET")

    async def test_http_1_response(self):
        httpx_request = HTTPXRequest(http_version="1.1")
        async with httpx_request:
            resp = await httpx_request._client.request(
                url="https://python-telegram-bot.org",
                method="GET",
                headers={"User-Agent": httpx_request.USER_AGENT},
            )
            assert resp.http_version == "HTTP/1.1"

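    # With the default `connection_pool_size=1`, only one of the two downloads can run at a
    # time, so exactly one task finishes in the first wait while the other is still pending.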
    async def test_do_request_wait_for_pool(self, httpx_request):
        """The pool logic is buried rather deeply in httpxcore, so we make actual requests here
        instead of mocking"""
        task_1 = asyncio.create_task(
            httpx_request.do_request(
                method="GET", url="https://python-telegram-bot.org/static/testfiles/telegram.mp4"
            )
        )
        task_2 = asyncio.create_task(
            httpx_request.do_request(
                method="GET", url="https://python-telegram-bot.org/static/testfiles/telegram.mp4"
            )
        )
        done, pending = await asyncio.wait({task_1, task_2}, return_when=asyncio.FIRST_COMPLETED)
        assert len(done) == len(pending) == 1
        done, pending = await asyncio.wait({task_1, task_2}, return_when=asyncio.ALL_COMPLETED)
        assert len(done) == 2
        assert len(pending) == 0
        try:  # retrieve exceptions from tasks
            task_1.exception()
            task_2.exception()
        except (asyncio.CancelledError, asyncio.InvalidStateError):
            pass

    async def test_input_file_postponed_read(self, bot, chat_id):
        """Here we test that `read_file_handle=False` is correctly handled by HTTPXRequest.
        Since manually building the RequestData object has no real benefit, we simply use the Bot
        for that.
        """
        message = await bot.send_document(
            document=InputFile(data_file("telegram.jpg").open("rb"), read_file_handle=False),
            chat_id=chat_id,
        )
        assert message.document
        assert message.document.file_name == "telegram.jpg"