!!! note
    Only use these functions if you're testing HTTPX in a console
    or making a small number of requests. Using a Client will
    enable HTTP/2 and connection pooling for more efficient and
    long-lived connections.
::: httpx.request
    :docstring:

::: httpx.get
    :docstring:

::: httpx.options
    :docstring:

::: httpx.head
    :docstring:

::: httpx.post
    :docstring:

::: httpx.put
    :docstring:

::: httpx.patch
    :docstring:

::: httpx.delete
    :docstring:

::: httpx.stream
    :docstring:
## Client

::: httpx.Client
    :docstring:
    :members: headers cookies params auth request get head options post put patch delete stream build_request send close
## AsyncClient

::: httpx.AsyncClient
    :docstring:
    :members: headers cookies params auth request get head options post put patch delete stream build_request send aclose
## Response

An HTTP response.

* `def __init__(...)`
* `.status_code` - **int**
* `.reason_phrase` - **str**
* `.http_version` - `"HTTP/2"` or `"HTTP/1.1"`
* `.url` - **URL**
* `.headers` - **Headers**
* `.content` - **bytes**
* `.text` - **str**
* `.encoding` - **str**
* `.is_redirect` - **bool**
* `.request` - **Request**
* `.next_request` - **Optional[Request]**
* `.cookies` - **Cookies**
* `.history` - **List[Response]**
* `.elapsed` - **timedelta**
  * The amount of time elapsed between sending the request and calling `close()` on the corresponding response received for that request. Use `total_seconds()` to correctly get the total elapsed seconds.
* `def .raise_for_status()` - **Response**
* `def .json()` - **Any**
* `def .read()` - **bytes**
* `def .iter_raw([chunk_size])` - **bytes iterator**
* `def .iter_bytes([chunk_size])` - **bytes iterator**
* `def .iter_text([chunk_size])` - **text iterator**
* `def .iter_lines()` - **text iterator**
* `def .close()` - **None**
* `def .next()` - **Response**
* `def .aread()` - **bytes**
* `def .aiter_raw([chunk_size])` - **async bytes iterator**
* `def .aiter_bytes([chunk_size])` - **async bytes iterator**
* `def .aiter_text([chunk_size])` - **async text iterator**
* `def .aiter_lines()` - **async text iterator**
* `def .aclose()` - **None**
* `def .anext()` - **Response**

## Request

An HTTP request. Can be constructed explicitly for more control over exactly what gets sent over the wire.
>>> request = httpx.Request("GET", "https://example.org", headers={'host': 'example.org'})
>>> response = client.send(request)
* `def __init__(method, url, [params], [headers], [cookies], [content], [data], [files], [json], [stream])`
* `.method` - **str**
* `.url` - **URL**
* `.content` - **byte**, **byte iterator**, or **byte async iterator**
* `.headers` - **Headers**
* `.cookies` - **Cookies**

## URL

A normalized, IDNA supporting URL.
>>> url = URL("https://example.org/")
>>> url.host
'example.org'
* `def __init__(url, **kwargs)`
* `.scheme` - **str**
* `.authority` - **str**
* `.host` - **str**
* `.port` - **int**
* `.path` - **str**
* `.query` - **str**
* `.raw_path` - **str**
* `.fragment` - **str**
* `.is_ssl` - **bool**
* `.is_absolute_url` - **bool**
* `.is_relative_url` - **bool**
* `def .copy_with([scheme], [authority], [path], [query], [fragment])` - **URL**

## Headers

A case-insensitive multi-dict.
>>> headers = Headers({'Content-Type': 'application/json'})
>>> headers['content-type']
'application/json'
* `def __init__(self, headers, encoding=None)`
* `def copy()` - **Headers**

## Cookies

A dict-like cookie store.
>>> cookies = Cookies()
>>> cookies.set("name", "value", domain="example.org")
* `def __init__(cookies: [dict, Cookies, CookieJar])`
* `.jar` - **CookieJar**
* `def extract_cookies(response)`
* `def set_cookie_header(request)`
* `def set(name, value, [domain], [path])`
* `def get(name, [domain], [path])`
* `def delete(name, [domain], [path])`
* `def clear([domain], [path])`

## Proxy

A configuration of the proxy server.
>>> proxy = Proxy("http://proxy.example.com:8030")
>>> client = Client(proxy=proxy)
* `def __init__(url, [ssl_context], [auth], [headers])`
* `.url` - **URL**
* `.auth` - **tuple[str, str]**
* `.headers` - **Headers**
* `.ssl_context` - **SSLContext**