second commit
env/lib/python3.11/site-packages/httpx/_transports/__init__.py (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
from .asgi import *
from .base import *
from .default import *
from .mock import *
from .wsgi import *

__all__ = [
    "ASGITransport",
    "AsyncBaseTransport",
    "BaseTransport",
    "AsyncHTTPTransport",
    "HTTPTransport",
    "MockTransport",
    "WSGITransport",
]
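This module only re-exports the concrete transport classes. As a rough, hedged illustration of how they are consumed (not part of this diff), any of them can be passed to a client via the `transport=` argument; the URL below is a placeholder and the request goes over the real network.

```python
import httpx

# Sketch only: HTTPTransport is the default network transport.
# The retries value is illustrative.
transport = httpx.HTTPTransport(retries=1)

with httpx.Client(transport=transport) as client:
    response = client.get("https://www.example.org/")
    print(response.status_code)
```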
env/lib/python3.11/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc (vendored, new binary file, not shown)
env/lib/python3.11/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc (vendored, new binary file, not shown)
env/lib/python3.11/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc (vendored, new binary file, not shown)
env/lib/python3.11/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc (vendored, new binary file, not shown)
env/lib/python3.11/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc (vendored, new binary file, not shown)
env/lib/python3.11/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc (vendored, new binary file, not shown)
env/lib/python3.11/site-packages/httpx/_transports/asgi.py (vendored, new file, 187 lines)
@@ -0,0 +1,187 @@
from __future__ import annotations

import typing

from .._models import Request, Response
from .._types import AsyncByteStream
from .base import AsyncBaseTransport

if typing.TYPE_CHECKING:  # pragma: no cover
    import asyncio

    import trio

    Event = typing.Union[asyncio.Event, trio.Event]


_Message = typing.MutableMapping[str, typing.Any]
_Receive = typing.Callable[[], typing.Awaitable[_Message]]
_Send = typing.Callable[
    [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None]
]
_ASGIApp = typing.Callable[
    [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None]
]

__all__ = ["ASGITransport"]


def is_running_trio() -> bool:
    try:
        # sniffio is a dependency of trio.
        # See https://github.com/python-trio/trio/issues/2802
        import sniffio

        if sniffio.current_async_library() == "trio":
            return True
    except ImportError:  # pragma: nocover
        pass

    return False


def create_event() -> Event:
    if is_running_trio():
        import trio

        return trio.Event()

    import asyncio

    return asyncio.Event()


class ASGIResponseStream(AsyncByteStream):
    def __init__(self, body: list[bytes]) -> None:
        self._body = body

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        yield b"".join(self._body)


class ASGITransport(AsyncBaseTransport):
    """
    A custom AsyncTransport that handles sending requests directly to an ASGI app.

    ```python
    transport = httpx.ASGITransport(
        app=app,
        root_path="/submount",
        client=("1.2.3.4", 123)
    )
    client = httpx.AsyncClient(transport=transport)
    ```

    Arguments:

    * `app` - The ASGI application.
    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
      should be raised. Default to `True`. Can be set to `False` for use cases
      such as testing the content of a client 500 response.
    * `root_path` - The root path on which the ASGI application should be mounted.
    * `client` - A two-tuple indicating the client IP and port of incoming requests.
    """

    def __init__(
        self,
        app: _ASGIApp,
        raise_app_exceptions: bool = True,
        root_path: str = "",
        client: tuple[str, int] = ("127.0.0.1", 123),
    ) -> None:
        self.app = app
        self.raise_app_exceptions = raise_app_exceptions
        self.root_path = root_path
        self.client = client

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        assert isinstance(request.stream, AsyncByteStream)

        # ASGI scope.
        scope = {
            "type": "http",
            "asgi": {"version": "3.0"},
            "http_version": "1.1",
            "method": request.method,
            "headers": [(k.lower(), v) for (k, v) in request.headers.raw],
            "scheme": request.url.scheme,
            "path": request.url.path,
            "raw_path": request.url.raw_path.split(b"?")[0],
            "query_string": request.url.query,
            "server": (request.url.host, request.url.port),
            "client": self.client,
            "root_path": self.root_path,
        }

        # Request.
        request_body_chunks = request.stream.__aiter__()
        request_complete = False

        # Response.
        status_code = None
        response_headers = None
        body_parts = []
        response_started = False
        response_complete = create_event()

        # ASGI callables.

        async def receive() -> dict[str, typing.Any]:
            nonlocal request_complete

            if request_complete:
                await response_complete.wait()
                return {"type": "http.disconnect"}

            try:
                body = await request_body_chunks.__anext__()
            except StopAsyncIteration:
                request_complete = True
                return {"type": "http.request", "body": b"", "more_body": False}
            return {"type": "http.request", "body": body, "more_body": True}

        async def send(message: typing.MutableMapping[str, typing.Any]) -> None:
            nonlocal status_code, response_headers, response_started

            if message["type"] == "http.response.start":
                assert not response_started

                status_code = message["status"]
                response_headers = message.get("headers", [])
                response_started = True

            elif message["type"] == "http.response.body":
                assert not response_complete.is_set()
                body = message.get("body", b"")
                more_body = message.get("more_body", False)

                if body and request.method != "HEAD":
                    body_parts.append(body)

                if not more_body:
                    response_complete.set()

        try:
            await self.app(scope, receive, send)
        except Exception:  # noqa: PIE-786
            if self.raise_app_exceptions:
                raise

            response_complete.set()
            if status_code is None:
                status_code = 500
            if response_headers is None:
                response_headers = {}

        assert response_complete.is_set()
        assert status_code is not None
        assert response_headers is not None

        stream = ASGIResponseStream(body_parts)

        return Response(status_code, headers=response_headers, stream=stream)
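To make the request/response flow above concrete, here is a hedged, self-contained sketch (not part of the vendored file) that drives `ASGITransport` with a minimal hand-written ASGI app. The `testserver` host is an arbitrary placeholder; the app ignores the request body, so the transport's `receive` callable is never awaited.

```python
import asyncio

import httpx


async def app(scope, receive, send):
    # Minimal ASGI app: always return a plain-text 200 response.
    assert scope["type"] == "http"
    await send(
        {
            "type": "http.response.start",
            "status": 200,
            "headers": [(b"content-type", b"text/plain")],
        }
    )
    await send({"type": "http.response.body", "body": b"Hello, world!"})


async def main() -> None:
    transport = httpx.ASGITransport(app=app)
    # base_url lets relative request paths resolve; the host name is arbitrary.
    async with httpx.AsyncClient(
        transport=transport, base_url="http://testserver"
    ) as client:
        response = await client.get("/")
        print(response.status_code, response.text)


asyncio.run(main())
```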
env/lib/python3.11/site-packages/httpx/_transports/base.py (vendored, new file, 86 lines)
@@ -0,0 +1,86 @@
from __future__ import annotations

import typing
from types import TracebackType

from .._models import Request, Response

T = typing.TypeVar("T", bound="BaseTransport")
A = typing.TypeVar("A", bound="AsyncBaseTransport")

__all__ = ["AsyncBaseTransport", "BaseTransport"]


class BaseTransport:
    def __enter__(self: T) -> T:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        self.close()

    def handle_request(self, request: Request) -> Response:
        """
        Send a single HTTP request and return a response.

        Developers shouldn't typically ever need to call into this API directly,
        since the Client class provides all the higher level user-facing API
        niceties.

        In order to properly release any network resources, the response
        stream should *either* be consumed immediately, with a call to
        `response.stream.read()`, or else the `handle_request` call should
        be followed with a try/finally block to ensure the stream is
        always closed.

        Example usage:

            with httpx.HTTPTransport() as transport:
                req = httpx.Request(
                    method=b"GET",
                    url=(b"https", b"www.example.com", 443, b"/"),
                    headers=[(b"Host", b"www.example.com")],
                )
                resp = transport.handle_request(req)
                body = resp.stream.read()
                print(resp.status_code, resp.headers, body)

        Takes a `Request` instance as the only argument.

        Returns a `Response` instance.
        """
        raise NotImplementedError(
            "The 'handle_request' method must be implemented."
        )  # pragma: no cover

    def close(self) -> None:
        pass


class AsyncBaseTransport:
    async def __aenter__(self: A) -> A:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        await self.aclose()

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        raise NotImplementedError(
            "The 'handle_async_request' method must be implemented."
        )  # pragma: no cover

    async def aclose(self) -> None:
        pass
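As a quick illustration of the interface these base classes define (a sketch, not part of the commit), a custom synchronous transport only has to implement `handle_request`; the `HelloWorldTransport` name below is made up for the example, and no network traffic is involved.

```python
import httpx


class HelloWorldTransport(httpx.BaseTransport):
    """A toy transport that never touches the network."""

    def handle_request(self, request: httpx.Request) -> httpx.Response:
        # Return the same canned JSON body for every request.
        return httpx.Response(200, json={"text": "Hello, world!"})


with httpx.Client(transport=HelloWorldTransport()) as client:
    response = client.get("https://example.org/")
    print(response.json())  # {'text': 'Hello, world!'}
```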
env/lib/python3.11/site-packages/httpx/_transports/default.py (vendored, new file, 406 lines)
@@ -0,0 +1,406 @@
"""
Custom transports, with nicely configured defaults.

The following additional keyword arguments are currently supported by httpcore...

* uds: str
* local_address: str
* retries: int

Example usages...

# Disable HTTP/2 on a single specific domain.
mounts = {
    "all://": httpx.HTTPTransport(http2=True),
    "all://*example.org": httpx.HTTPTransport()
}

# Using advanced httpcore configuration, with connection retries.
transport = httpx.HTTPTransport(retries=1)
client = httpx.Client(transport=transport)

# Using advanced httpcore configuration, with unix domain sockets.
transport = httpx.HTTPTransport(uds="socket.uds")
client = httpx.Client(transport=transport)
"""

from __future__ import annotations

import contextlib
import typing
from types import TracebackType

if typing.TYPE_CHECKING:
    import ssl  # pragma: no cover

    import httpx  # pragma: no cover

from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context
from .._exceptions import (
    ConnectError,
    ConnectTimeout,
    LocalProtocolError,
    NetworkError,
    PoolTimeout,
    ProtocolError,
    ProxyError,
    ReadError,
    ReadTimeout,
    RemoteProtocolError,
    TimeoutException,
    UnsupportedProtocol,
    WriteError,
    WriteTimeout,
)
from .._models import Request, Response
from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream
from .._urls import URL
from .base import AsyncBaseTransport, BaseTransport

T = typing.TypeVar("T", bound="HTTPTransport")
A = typing.TypeVar("A", bound="AsyncHTTPTransport")

SOCKET_OPTION = typing.Union[
    typing.Tuple[int, int, int],
    typing.Tuple[int, int, typing.Union[bytes, bytearray]],
    typing.Tuple[int, int, None, int],
]

__all__ = ["AsyncHTTPTransport", "HTTPTransport"]

HTTPCORE_EXC_MAP: dict[type[Exception], type[httpx.HTTPError]] = {}


def _load_httpcore_exceptions() -> dict[type[Exception], type[httpx.HTTPError]]:
    import httpcore

    return {
        httpcore.TimeoutException: TimeoutException,
        httpcore.ConnectTimeout: ConnectTimeout,
        httpcore.ReadTimeout: ReadTimeout,
        httpcore.WriteTimeout: WriteTimeout,
        httpcore.PoolTimeout: PoolTimeout,
        httpcore.NetworkError: NetworkError,
        httpcore.ConnectError: ConnectError,
        httpcore.ReadError: ReadError,
        httpcore.WriteError: WriteError,
        httpcore.ProxyError: ProxyError,
        httpcore.UnsupportedProtocol: UnsupportedProtocol,
        httpcore.ProtocolError: ProtocolError,
        httpcore.LocalProtocolError: LocalProtocolError,
        httpcore.RemoteProtocolError: RemoteProtocolError,
    }


@contextlib.contextmanager
def map_httpcore_exceptions() -> typing.Iterator[None]:
    global HTTPCORE_EXC_MAP
    if len(HTTPCORE_EXC_MAP) == 0:
        HTTPCORE_EXC_MAP = _load_httpcore_exceptions()
    try:
        yield
    except Exception as exc:
        mapped_exc = None

        for from_exc, to_exc in HTTPCORE_EXC_MAP.items():
            if not isinstance(exc, from_exc):
                continue
            # We want to map to the most specific exception we can find.
            # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to
            # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.
            if mapped_exc is None or issubclass(to_exc, mapped_exc):
                mapped_exc = to_exc

        if mapped_exc is None:  # pragma: no cover
            raise

        message = str(exc)
        raise mapped_exc(message) from exc


class ResponseStream(SyncByteStream):
    def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None:
        self._httpcore_stream = httpcore_stream

    def __iter__(self) -> typing.Iterator[bytes]:
        with map_httpcore_exceptions():
            for part in self._httpcore_stream:
                yield part

    def close(self) -> None:
        if hasattr(self._httpcore_stream, "close"):
            self._httpcore_stream.close()


class HTTPTransport(BaseTransport):
    def __init__(
        self,
        verify: ssl.SSLContext | str | bool = True,
        cert: CertTypes | None = None,
        trust_env: bool = True,
        http1: bool = True,
        http2: bool = False,
        limits: Limits = DEFAULT_LIMITS,
        proxy: ProxyTypes | None = None,
        uds: str | None = None,
        local_address: str | None = None,
        retries: int = 0,
        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
    ) -> None:
        import httpcore

        proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy
        ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env)

        if proxy is None:
            self._pool = httpcore.ConnectionPool(
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                uds=uds,
                local_address=local_address,
                retries=retries,
                socket_options=socket_options,
            )
        elif proxy.url.scheme in ("http", "https"):
            self._pool = httpcore.HTTPProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                proxy_headers=proxy.headers.raw,
                ssl_context=ssl_context,
                proxy_ssl_context=proxy.ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                socket_options=socket_options,
            )
        elif proxy.url.scheme in ("socks5", "socks5h"):
            try:
                import socksio  # noqa
            except ImportError:  # pragma: no cover
                raise ImportError(
                    "Using SOCKS proxy, but the 'socksio' package is not installed. "
                    "Make sure to install httpx using `pip install httpx[socks]`."
                ) from None

            self._pool = httpcore.SOCKSProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
            )
        else:  # pragma: no cover
            raise ValueError(
                "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h',"
                f" but got {proxy.url.scheme!r}."
            )

    def __enter__(self: T) -> T:  # Use generics for subclass support.
        self._pool.__enter__()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        with map_httpcore_exceptions():
            self._pool.__exit__(exc_type, exc_value, traceback)

    def handle_request(
        self,
        request: Request,
    ) -> Response:
        assert isinstance(request.stream, SyncByteStream)
        import httpcore

        req = httpcore.Request(
            method=request.method,
            url=httpcore.URL(
                scheme=request.url.raw_scheme,
                host=request.url.raw_host,
                port=request.url.port,
                target=request.url.raw_path,
            ),
            headers=request.headers.raw,
            content=request.stream,
            extensions=request.extensions,
        )
        with map_httpcore_exceptions():
            resp = self._pool.handle_request(req)

        assert isinstance(resp.stream, typing.Iterable)

        return Response(
            status_code=resp.status,
            headers=resp.headers,
            stream=ResponseStream(resp.stream),
            extensions=resp.extensions,
        )

    def close(self) -> None:
        self._pool.close()


class AsyncResponseStream(AsyncByteStream):
    def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None:
        self._httpcore_stream = httpcore_stream

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        with map_httpcore_exceptions():
            async for part in self._httpcore_stream:
                yield part

    async def aclose(self) -> None:
        if hasattr(self._httpcore_stream, "aclose"):
            await self._httpcore_stream.aclose()


class AsyncHTTPTransport(AsyncBaseTransport):
    def __init__(
        self,
        verify: ssl.SSLContext | str | bool = True,
        cert: CertTypes | None = None,
        trust_env: bool = True,
        http1: bool = True,
        http2: bool = False,
        limits: Limits = DEFAULT_LIMITS,
        proxy: ProxyTypes | None = None,
        uds: str | None = None,
        local_address: str | None = None,
        retries: int = 0,
        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
    ) -> None:
        import httpcore

        proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy
        ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env)

        if proxy is None:
            self._pool = httpcore.AsyncConnectionPool(
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                uds=uds,
                local_address=local_address,
                retries=retries,
                socket_options=socket_options,
            )
        elif proxy.url.scheme in ("http", "https"):
            self._pool = httpcore.AsyncHTTPProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                proxy_headers=proxy.headers.raw,
                proxy_ssl_context=proxy.ssl_context,
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                socket_options=socket_options,
            )
        elif proxy.url.scheme in ("socks5", "socks5h"):
            try:
                import socksio  # noqa
            except ImportError:  # pragma: no cover
                raise ImportError(
                    "Using SOCKS proxy, but the 'socksio' package is not installed. "
                    "Make sure to install httpx using `pip install httpx[socks]`."
                ) from None

            self._pool = httpcore.AsyncSOCKSProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
            )
        else:  # pragma: no cover
            raise ValueError(
                "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h',"
                f" but got {proxy.url.scheme!r}."
            )

    async def __aenter__(self: A) -> A:  # Use generics for subclass support.
        await self._pool.__aenter__()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        with map_httpcore_exceptions():
            await self._pool.__aexit__(exc_type, exc_value, traceback)

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        assert isinstance(request.stream, AsyncByteStream)
        import httpcore

        req = httpcore.Request(
            method=request.method,
            url=httpcore.URL(
                scheme=request.url.raw_scheme,
                host=request.url.raw_host,
                port=request.url.port,
                target=request.url.raw_path,
            ),
            headers=request.headers.raw,
            content=request.stream,
            extensions=request.extensions,
        )
        with map_httpcore_exceptions():
            resp = await self._pool.handle_async_request(req)

        assert isinstance(resp.stream, typing.AsyncIterable)

        return Response(
            status_code=resp.status,
            headers=resp.headers,
            stream=AsyncResponseStream(resp.stream),
            extensions=resp.extensions,
        )

    async def aclose(self) -> None:
        await self._pool.aclose()
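A short usage sketch for the two concrete transports defined here, echoing the module docstring above (illustrative only, not part of the vendored file): the first block performs a real network request, and the `http2=True` mount assumes the optional `h2` extra is installed.

```python
import httpx

# Connection retries at the transport layer, as in the module docstring.
transport = httpx.HTTPTransport(retries=1)
with httpx.Client(transport=transport) as client:
    response = client.get("https://www.example.org/")
    print(response.status_code)

# Per-pattern transports: HTTP/2 everywhere except example.org.
# (The http2=True pool assumes the optional `h2` extra is installed.)
mounts = {
    "all://": httpx.HTTPTransport(http2=True),
    "all://*example.org": httpx.HTTPTransport(),
}
client = httpx.Client(mounts=mounts)
```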
env/lib/python3.11/site-packages/httpx/_transports/mock.py (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
from __future__ import annotations

import typing

from .._models import Request, Response
from .base import AsyncBaseTransport, BaseTransport

SyncHandler = typing.Callable[[Request], Response]
AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]]


__all__ = ["MockTransport"]


class MockTransport(AsyncBaseTransport, BaseTransport):
    def __init__(self, handler: SyncHandler | AsyncHandler) -> None:
        self.handler = handler

    def handle_request(
        self,
        request: Request,
    ) -> Response:
        request.read()
        response = self.handler(request)
        if not isinstance(response, Response):  # pragma: no cover
            raise TypeError("Cannot use an async handler in a sync Client")
        return response

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        await request.aread()
        response = self.handler(request)

        # Allow the handler to *optionally* be an `async` function.
        # If it is, then the `response` variable needs to be awaited to actually
        # return the result.

        if not isinstance(response, Response):
            response = await response

        return response
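For context, a hedged sketch of how `MockTransport` is typically used in tests (the handler name and URL are arbitrary): the handler receives the fully-read `Request` and must return an `httpx.Response`, so no sockets are ever opened.

```python
import httpx


def handler(request: httpx.Request) -> httpx.Response:
    # Return a canned response regardless of the request.
    return httpx.Response(200, json={"url": str(request.url)})


transport = httpx.MockTransport(handler)

with httpx.Client(transport=transport) as client:
    response = client.get("https://example.org/ping")
    print(response.json())  # {'url': 'https://example.org/ping'}
```

An `async def` handler works the same way with `httpx.AsyncClient`; that is what the optional awaiting logic in `handle_async_request` above supports.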
env/lib/python3.11/site-packages/httpx/_transports/wsgi.py (vendored, new file, 149 lines)
@@ -0,0 +1,149 @@
from __future__ import annotations

import io
import itertools
import sys
import typing

from .._models import Request, Response
from .._types import SyncByteStream
from .base import BaseTransport

if typing.TYPE_CHECKING:
    from _typeshed import OptExcInfo  # pragma: no cover
    from _typeshed.wsgi import WSGIApplication  # pragma: no cover

_T = typing.TypeVar("_T")


__all__ = ["WSGITransport"]


def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]:
    body = iter(body)
    for chunk in body:
        if chunk:
            return itertools.chain([chunk], body)
    return []


class WSGIByteStream(SyncByteStream):
    def __init__(self, result: typing.Iterable[bytes]) -> None:
        self._close = getattr(result, "close", None)
        self._result = _skip_leading_empty_chunks(result)

    def __iter__(self) -> typing.Iterator[bytes]:
        for part in self._result:
            yield part

    def close(self) -> None:
        if self._close is not None:
            self._close()


class WSGITransport(BaseTransport):
    """
    A custom transport that handles sending requests directly to a WSGI app.
    The simplest way to use this functionality is to use the `app` argument.

    ```
    client = httpx.Client(app=app)
    ```

    Alternatively, you can set up the transport instance explicitly.
    This allows you to include any additional configuration arguments specific
    to the WSGITransport class:

    ```
    transport = httpx.WSGITransport(
        app=app,
        script_name="/submount",
        remote_addr="1.2.3.4"
    )
    client = httpx.Client(transport=transport)
    ```

    Arguments:

    * `app` - The WSGI application.
    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
      should be raised. Default to `True`. Can be set to `False` for use cases
      such as testing the content of a client 500 response.
    * `script_name` - The root path on which the WSGI application should be mounted.
    * `remote_addr` - A string indicating the client IP of incoming requests.
    """

    def __init__(
        self,
        app: WSGIApplication,
        raise_app_exceptions: bool = True,
        script_name: str = "",
        remote_addr: str = "127.0.0.1",
        wsgi_errors: typing.TextIO | None = None,
    ) -> None:
        self.app = app
        self.raise_app_exceptions = raise_app_exceptions
        self.script_name = script_name
        self.remote_addr = remote_addr
        self.wsgi_errors = wsgi_errors

    def handle_request(self, request: Request) -> Response:
        request.read()
        wsgi_input = io.BytesIO(request.content)

        port = request.url.port or {"http": 80, "https": 443}[request.url.scheme]
        environ = {
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": request.url.scheme,
            "wsgi.input": wsgi_input,
            "wsgi.errors": self.wsgi_errors or sys.stderr,
            "wsgi.multithread": True,
            "wsgi.multiprocess": False,
            "wsgi.run_once": False,
            "REQUEST_METHOD": request.method,
            "SCRIPT_NAME": self.script_name,
            "PATH_INFO": request.url.path,
            "QUERY_STRING": request.url.query.decode("ascii"),
            "SERVER_NAME": request.url.host,
            "SERVER_PORT": str(port),
            "SERVER_PROTOCOL": "HTTP/1.1",
            "REMOTE_ADDR": self.remote_addr,
        }
        for header_key, header_value in request.headers.raw:
            key = header_key.decode("ascii").upper().replace("-", "_")
            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
                key = "HTTP_" + key
            environ[key] = header_value.decode("ascii")

        seen_status = None
        seen_response_headers = None
        seen_exc_info = None

        def start_response(
            status: str,
            response_headers: list[tuple[str, str]],
            exc_info: OptExcInfo | None = None,
        ) -> typing.Callable[[bytes], typing.Any]:
            nonlocal seen_status, seen_response_headers, seen_exc_info
            seen_status = status
            seen_response_headers = response_headers
            seen_exc_info = exc_info
            return lambda _: None

        result = self.app(environ, start_response)

        stream = WSGIByteStream(result)

        assert seen_status is not None
        assert seen_response_headers is not None
        if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions:
            raise seen_exc_info[1]

        status_code = int(seen_status.split()[0])
        headers = [
            (key.encode("ascii"), value.encode("ascii"))
            for key, value in seen_response_headers
        ]

        return Response(status_code, headers=headers, stream=stream)
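To round this out, a self-contained sketch (not part of the vendored file) exercising `WSGITransport` against a hand-written WSGI callable; `testserver` is an arbitrary placeholder host and everything runs in-process.

```python
import httpx


def app(environ, start_response):
    # Minimal WSGI app: always return a plain-text 200 response.
    status = "200 OK"
    headers = [("Content-Type", "text/plain")]
    start_response(status, headers)
    return [b"Hello, world!"]


transport = httpx.WSGITransport(app=app)

with httpx.Client(transport=transport, base_url="http://testserver") as client:
    response = client.get("/")
    print(response.status_code, response.text)
```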