id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
157,712 | import base64
import binascii
from typing import Any, Union
def base64url_encode(input: bytes) -> bytes:
    """Encode *input* as base64url (RFC 4648 §5) with '=' padding stripped."""
    encoded = base64.urlsafe_b64encode(input)
    # '=' only ever appears as trailing padding in base64 output.
    return encoded.rstrip(b"=")
def bytes_from_int(val: int) -> bytes:
    """Return the minimal big-endian byte representation of non-negative *val*.

    For ``val == 0`` this returns ``b""`` (zero length), matching the
    behavior of the original shift-and-count loop.
    """
    # (bit_length + 7) // 8 is the minimal byte count for the value;
    # this replaces the hand-rolled loop with the stdlib idiom.
    return val.to_bytes((val.bit_length() + 7) // 8, "big", signed=False)
def to_base64url_uint(val: int) -> bytes:
    """Encode a non-negative integer in the JWK base64url-uint form."""
    if val < 0:
        raise ValueError("Must be a positive integer")
    # Zero has an empty minimal representation; encode it as one zero byte.
    int_bytes = bytes_from_int(val) or b"\x00"
    return base64url_encode(int_bytes)
157,713 | import base64
import binascii
from typing import Any, Union
def base64url_decode(input: Union[str, bytes]) -> bytes:
    """Decode base64url data, tolerating missing '=' padding."""
    raw = input.encode("ascii") if isinstance(input, str) else input
    # Restore padding to a multiple of four before delegating to the stdlib.
    padding = -len(raw) % 4
    return base64.urlsafe_b64decode(raw + b"=" * padding)
def from_base64url_uint(val: Union[str, bytes]) -> int:
    """Decode a JWK base64url-uint value into a Python int."""
    raw = val.encode("ascii") if isinstance(val, str) else val
    return int.from_bytes(base64url_decode(raw), byteorder="big")
157,714 | import base64
import binascii
from typing import Any, Union
def number_to_bytes(num: int, num_bytes: int) -> bytes:
    """Serialize *num* as big-endian bytes zero-padded to *num_bytes*."""
    # Render as hex with 2 chars per byte, then convert to raw bytes.
    padded_hex = format(num, "0{}x".format(2 * num_bytes))
    return binascii.a2b_hex(padded_hex.encode("ascii"))
def der_to_raw_signature(der_sig: bytes, curve: EllipticCurve) -> bytes:
    """Convert a DER-encoded ECDSA signature into the raw ``r || s`` form.

    Each integer is left-padded to the curve's coordinate width, as the
    JWS (JOSE) signature format requires.
    """
    num_bits = curve.key_size
    num_bytes = (num_bits + 7) // 8  # ceil(bits / 8): bytes per coordinate
    r, s = decode_dss_signature(der_sig)
    return number_to_bytes(r, num_bytes) + number_to_bytes(s, num_bytes)
157,715 | import base64
import binascii
from typing import Any, Union
def bytes_to_number(string: bytes) -> int:
    """Interpret *string* as a big-endian unsigned integer via its hex form."""
    # hexlify is the modern alias of b2a_hex; behavior is identical,
    # including raising ValueError for empty input.
    return int(binascii.hexlify(string), 16)
def raw_to_der_signature(raw_sig: bytes, curve: EllipticCurve) -> bytes:
    """Convert a raw ``r || s`` ECDSA signature into DER encoding.

    Raises ValueError when *raw_sig* is not exactly two coordinate-widths
    long for the given curve.
    """
    num_bits = curve.key_size
    num_bytes = (num_bits + 7) // 8  # bytes per coordinate for this curve
    if len(raw_sig) != 2 * num_bytes:
        raise ValueError("Invalid signature")
    r = bytes_to_number(raw_sig[:num_bytes])
    s = bytes_to_number(raw_sig[num_bytes:])
    return encode_dss_signature(r, s)
157,716 | import hashlib
import hmac
import json
from .exceptions import InvalidKeyError
from .utils import (
base64url_decode,
base64url_encode,
der_to_raw_signature,
force_bytes,
from_base64url_uint,
raw_to_der_signature,
to_base64url_uint,
)
class NoneAlgorithm(Algorithm):
    """
    Placeholder for use when no signing or verification
    operations are required.
    """

    def prepare_key(self, key):
        # Normalize the empty string to None, then require None.
        normalized = None if key == "" else key
        if normalized is not None:
            raise InvalidKeyError('When alg = "none", key value must be None.')
        return normalized

    def sign(self, msg, key):
        # Unsigned tokens carry an empty signature segment.
        return b""

    def verify(self, msg, key, sig):
        # "none" signatures are never accepted on verification.
        return False
class HMACAlgorithm(Algorithm):
    """
    Performs signing and verification operations using HMAC
    and the specified hash function.
    """

    SHA256 = hashlib.sha256
    SHA384 = hashlib.sha384
    SHA512 = hashlib.sha512

    def __init__(self, hash_alg):
        # hash_alg is a hashlib constructor (e.g. hashlib.sha256).
        self.hash_alg = hash_alg

    def prepare_key(self, key):
        key = force_bytes(key)
        # Reject PEM/SSH asymmetric key material used as an HMAC secret —
        # guards against the classic RS256->HS256 key-confusion attack.
        invalid_strings = [
            b"-----BEGIN PUBLIC KEY-----",
            b"-----BEGIN CERTIFICATE-----",
            b"-----BEGIN RSA PUBLIC KEY-----",
            b"ssh-rsa",
        ]
        if any(string_value in key for string_value in invalid_strings):
            raise InvalidKeyError(
                "The specified key is an asymmetric key or x509 certificate and"
                " should not be used as an HMAC secret."
            )
        return key

    # NOTE(review): to_jwk/from_jwk take no `self`/`cls` — presumably
    # @staticmethod upstream; the decorator may have been lost. Confirm.
    def to_jwk(key_obj):
        # Serialize the secret as a symmetric ("oct") JWK.
        return json.dumps(
            {
                "k": base64url_encode(force_bytes(key_obj)).decode(),
                "kty": "oct",
            }
        )

    def from_jwk(jwk):
        # Accept either a JSON string or an already-parsed dict.
        try:
            if isinstance(jwk, str):
                obj = json.loads(jwk)
            elif isinstance(jwk, dict):
                obj = jwk
            else:
                raise ValueError
        except ValueError:
            raise InvalidKeyError("Key is not valid JSON")
        if obj.get("kty") != "oct":
            raise InvalidKeyError("Not an HMAC key")
        return base64url_decode(obj["k"])

    def sign(self, msg, key):
        return hmac.new(key, msg, self.hash_alg).digest()

    def verify(self, msg, key, sig):
        # Constant-time comparison avoids timing side channels.
        return hmac.compare_digest(sig, self.sign(msg, key))
if has_crypto:

    class RSAAlgorithm(Algorithm):
        """
        Performs signing and verification operations using
        RSASSA-PKCS-v1_5 and the specified hash function.
        """

        SHA256 = hashes.SHA256
        SHA384 = hashes.SHA384
        SHA512 = hashes.SHA512

        def __init__(self, hash_alg):
            # hash_alg is a `cryptography` hash class, instantiated per call.
            self.hash_alg = hash_alg

        def prepare_key(self, key):
            # Already-loaded cryptography key objects pass through untouched.
            if isinstance(key, (RSAPrivateKey, RSAPublicKey)):
                return key
            if not isinstance(key, (bytes, str)):
                raise TypeError("Expecting a PEM-formatted key.")
            key = force_bytes(key)
            try:
                if key.startswith(b"ssh-rsa"):
                    key = load_ssh_public_key(key)
                else:
                    key = load_pem_private_key(key, password=None)
            except ValueError:
                # Not a private key: fall back to loading a PEM public key.
                key = load_pem_public_key(key)
            return key

        # NOTE(review): to_jwk/from_jwk take no `self`/`cls` — presumably
        # @staticmethod upstream; the decorator may have been lost. Confirm.
        def to_jwk(key_obj):
            obj = None
            if getattr(key_obj, "private_numbers", None):
                # Private key
                numbers = key_obj.private_numbers()
                obj = {
                    "kty": "RSA",
                    "key_ops": ["sign"],
                    "n": to_base64url_uint(numbers.public_numbers.n).decode(),
                    "e": to_base64url_uint(numbers.public_numbers.e).decode(),
                    "d": to_base64url_uint(numbers.d).decode(),
                    "p": to_base64url_uint(numbers.p).decode(),
                    "q": to_base64url_uint(numbers.q).decode(),
                    "dp": to_base64url_uint(numbers.dmp1).decode(),
                    "dq": to_base64url_uint(numbers.dmq1).decode(),
                    "qi": to_base64url_uint(numbers.iqmp).decode(),
                }
            elif getattr(key_obj, "verify", None):
                # Public key
                numbers = key_obj.public_numbers()
                obj = {
                    "kty": "RSA",
                    "key_ops": ["verify"],
                    "n": to_base64url_uint(numbers.n).decode(),
                    "e": to_base64url_uint(numbers.e).decode(),
                }
            else:
                raise InvalidKeyError("Not a public or private key")
            return json.dumps(obj)

        def from_jwk(jwk):
            # Accept either a JSON string or an already-parsed dict.
            try:
                if isinstance(jwk, str):
                    obj = json.loads(jwk)
                elif isinstance(jwk, dict):
                    obj = jwk
                else:
                    raise ValueError
            except ValueError:
                raise InvalidKeyError("Key is not valid JSON")
            if obj.get("kty") != "RSA":
                raise InvalidKeyError("Not an RSA key")
            if "d" in obj and "e" in obj and "n" in obj:
                # Private key
                if "oth" in obj:
                    raise InvalidKeyError(
                        "Unsupported RSA private key: > 2 primes not supported"
                    )
                # CRT parameters must be all-present or all-absent.
                other_props = ["p", "q", "dp", "dq", "qi"]
                props_found = [prop in obj for prop in other_props]
                any_props_found = any(props_found)
                if any_props_found and not all(props_found):
                    raise InvalidKeyError(
                        "RSA key must include all parameters if any are present besides d"
                    )
                public_numbers = RSAPublicNumbers(
                    from_base64url_uint(obj["e"]),
                    from_base64url_uint(obj["n"]),
                )
                if any_props_found:
                    numbers = RSAPrivateNumbers(
                        d=from_base64url_uint(obj["d"]),
                        p=from_base64url_uint(obj["p"]),
                        q=from_base64url_uint(obj["q"]),
                        dmp1=from_base64url_uint(obj["dp"]),
                        dmq1=from_base64url_uint(obj["dq"]),
                        iqmp=from_base64url_uint(obj["qi"]),
                        public_numbers=public_numbers,
                    )
                else:
                    # Only d supplied: recover p/q and CRT params from (n, e, d).
                    d = from_base64url_uint(obj["d"])
                    p, q = rsa_recover_prime_factors(
                        public_numbers.n, d, public_numbers.e
                    )
                    numbers = RSAPrivateNumbers(
                        d=d,
                        p=p,
                        q=q,
                        dmp1=rsa_crt_dmp1(d, p),
                        dmq1=rsa_crt_dmq1(d, q),
                        iqmp=rsa_crt_iqmp(p, q),
                        public_numbers=public_numbers,
                    )
                return numbers.private_key()
            elif "n" in obj and "e" in obj:
                # Public key
                numbers = RSAPublicNumbers(
                    from_base64url_uint(obj["e"]),
                    from_base64url_uint(obj["n"]),
                )
                return numbers.public_key()
            else:
                raise InvalidKeyError("Not a public or private key")

        def sign(self, msg, key):
            return key.sign(msg, padding.PKCS1v15(), self.hash_alg())

        def verify(self, msg, key, sig):
            # Returns True on a valid signature, False otherwise.
            try:
                key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg())
                return True
            except InvalidSignature:
                return False
class ECAlgorithm(Algorithm):
    """
    Performs signing and verification operations using
    ECDSA and the specified hash function
    """

    SHA256 = hashes.SHA256
    SHA384 = hashes.SHA384
    SHA512 = hashes.SHA512

    def __init__(self, hash_alg):
        # hash_alg is a `cryptography` hash class, instantiated per call.
        self.hash_alg = hash_alg

    def prepare_key(self, key):
        """Normalize *key* into a cryptography EC key object.

        Accepts an already-loaded key object, PEM str/bytes, or an OpenSSH
        public key. Raises TypeError for anything else.
        """
        if isinstance(key, (EllipticCurvePrivateKey, EllipticCurvePublicKey)):
            return key
        if not isinstance(key, (bytes, str)):
            raise TypeError("Expecting a PEM-formatted key.")
        key = force_bytes(key)
        # Attempt to load key. We don't know if it's
        # a Signing Key or a Verifying Key, so we try
        # the Verifying Key first.
        try:
            if key.startswith(b"ecdsa-sha2-"):
                key = load_ssh_public_key(key)
            else:
                key = load_pem_public_key(key)
        except ValueError:
            key = load_pem_private_key(key, password=None)
        return key

    def sign(self, msg, key):
        """Sign *msg*, returning the raw (r || s) JOSE signature form."""
        der_sig = key.sign(msg, ec.ECDSA(self.hash_alg()))
        return der_to_raw_signature(der_sig, key.curve)

    def verify(self, msg, key, sig):
        """Return True iff raw signature *sig* is valid for *msg*."""
        try:
            der_sig = raw_to_der_signature(sig, key.curve)
        except ValueError:
            return False
        try:
            # A private key verifies through its public half.
            if isinstance(key, EllipticCurvePrivateKey):
                key = key.public_key()
            key.verify(der_sig, msg, ec.ECDSA(self.hash_alg()))
            return True
        except InvalidSignature:
            return False

    # NOTE(review): takes no `self`/`cls` — presumably @staticmethod upstream.
    def from_jwk(jwk):
        """Build an EC key object from a JWK dict or JSON string.

        Raises InvalidKeyError on malformed input or unsupported curves.
        """
        try:
            if isinstance(jwk, str):
                obj = json.loads(jwk)
            elif isinstance(jwk, dict):
                obj = jwk
            else:
                raise ValueError
        except ValueError:
            raise InvalidKeyError("Key is not valid JSON")
        if obj.get("kty") != "EC":
            raise InvalidKeyError("Not an Elliptic curve key")
        if "x" not in obj or "y" not in obj:
            raise InvalidKeyError("Not an Elliptic curve key")
        x = base64url_decode(obj.get("x"))
        y = base64url_decode(obj.get("y"))
        curve = obj.get("crv")
        # Coordinate widths are fixed per curve; validate before building.
        if curve == "P-256":
            if len(x) == len(y) == 32:
                curve_obj = ec.SECP256R1()
            else:
                raise InvalidKeyError("Coords should be 32 bytes for curve P-256")
        elif curve == "P-384":
            if len(x) == len(y) == 48:
                curve_obj = ec.SECP384R1()
            else:
                raise InvalidKeyError("Coords should be 48 bytes for curve P-384")
        elif curve == "P-521":
            if len(x) == len(y) == 66:
                curve_obj = ec.SECP521R1()
            else:
                raise InvalidKeyError("Coords should be 66 bytes for curve P-521")
        elif curve == "secp256k1":
            if len(x) == len(y) == 32:
                curve_obj = ec.SECP256K1()
            else:
                raise InvalidKeyError(
                    "Coords should be 32 bytes for curve secp256k1"
                )
        else:
            raise InvalidKeyError(f"Invalid curve: {curve}")
        public_numbers = ec.EllipticCurvePublicNumbers(
            x=int.from_bytes(x, byteorder="big"),
            y=int.from_bytes(y, byteorder="big"),
            curve=curve_obj,
        )
        if "d" not in obj:
            return public_numbers.public_key()
        d = base64url_decode(obj.get("d"))
        if len(d) != len(x):
            # BUG FIX: the format args were previously passed as extra
            # exception arguments, so the "{}" placeholders were never filled.
            raise InvalidKeyError(
                "D should be {} bytes for curve {}".format(len(x), curve)
            )
        return ec.EllipticCurvePrivateNumbers(
            int.from_bytes(d, byteorder="big"), public_numbers
        ).private_key()
class RSAPSSAlgorithm(RSAAlgorithm):
    """
    Performs a signature using RSASSA-PSS with MGF1
    """

    def sign(self, msg, key):
        # Salt length equals the digest size, as RFC 7518 requires for PS*.
        return key.sign(
            msg,
            padding.PSS(
                mgf=padding.MGF1(self.hash_alg()),
                salt_length=self.hash_alg.digest_size,
            ),
            self.hash_alg(),
        )

    def verify(self, msg, key, sig):
        # Returns True on a valid signature, False otherwise.
        try:
            key.verify(
                sig,
                msg,
                padding.PSS(
                    mgf=padding.MGF1(self.hash_alg()),
                    salt_length=self.hash_alg.digest_size,
                ),
                self.hash_alg(),
            )
            return True
        except InvalidSignature:
            return False
class OKPAlgorithm(Algorithm):
    """
    Performs signing and verification operations using EdDSA
    This class requires ``cryptography>=2.6`` to be installed.
    """

    def __init__(self, **kwargs):
        pass

    def prepare_key(self, key):
        # Already-loaded Ed25519/Ed448 key objects pass straight through.
        if isinstance(
            key,
            (Ed25519PrivateKey, Ed25519PublicKey, Ed448PrivateKey, Ed448PublicKey),
        ):
            return key
        if isinstance(key, (bytes, str)):
            if isinstance(key, str):
                key = key.encode("utf-8")
            str_key = key.decode("utf-8")
            if "-----BEGIN PUBLIC" in str_key:
                return load_pem_public_key(key)
            if "-----BEGIN PRIVATE" in str_key:
                return load_pem_private_key(key, password=None)
            if str_key[0:4] == "ssh-":
                return load_ssh_public_key(key)
        # NOTE(review): nesting reconstructed from flattened source — the
        # raise is assumed to apply to any unrecognized input. Confirm.
        raise TypeError("Expecting a PEM-formatted or OpenSSH key.")

    def sign(self, msg, key):
        """
        Sign a message ``msg`` using the EdDSA private key ``key``
        :param str|bytes msg: Message to sign
        :param Ed25519PrivateKey|Ed448PrivateKey key: A :class:`.Ed25519PrivateKey`
            or :class:`.Ed448PrivateKey` instance
        :return bytes signature: The signature, as bytes
        """
        msg = bytes(msg, "utf-8") if type(msg) is not bytes else msg
        return key.sign(msg)

    def verify(self, msg, key, sig):
        """
        Verify a given ``msg`` against a signature ``sig`` using the EdDSA key ``key``
        :param str|bytes sig: EdDSA signature to check ``msg`` against
        :param str|bytes msg: Message to sign
        :param Ed25519PrivateKey|Ed25519PublicKey|Ed448PrivateKey|Ed448PublicKey key:
            A private or public EdDSA key instance
        :return bool verified: True if signature is valid, False if not.
        """
        try:
            msg = bytes(msg, "utf-8") if type(msg) is not bytes else msg
            sig = bytes(sig, "utf-8") if type(sig) is not bytes else sig
            # A private key verifies through its public half.
            if isinstance(key, (Ed25519PrivateKey, Ed448PrivateKey)):
                key = key.public_key()
            key.verify(sig, msg)
            return True  # If no exception was raised, the signature is valid.
        except cryptography.exceptions.InvalidSignature:
            return False

    # NOTE(review): to_jwk/from_jwk take no `self`/`cls` — presumably
    # @staticmethod upstream; the decorator may have been lost. Confirm.
    def to_jwk(key):
        # Public keys serialize to {x, kty, crv}; private keys also carry d.
        if isinstance(key, (Ed25519PublicKey, Ed448PublicKey)):
            x = key.public_bytes(
                encoding=Encoding.Raw,
                format=PublicFormat.Raw,
            )
            crv = "Ed25519" if isinstance(key, Ed25519PublicKey) else "Ed448"
            return json.dumps(
                {
                    "x": base64url_encode(force_bytes(x)).decode(),
                    "kty": "OKP",
                    "crv": crv,
                }
            )
        if isinstance(key, (Ed25519PrivateKey, Ed448PrivateKey)):
            d = key.private_bytes(
                encoding=Encoding.Raw,
                format=PrivateFormat.Raw,
                encryption_algorithm=NoEncryption(),
            )
            x = key.public_key().public_bytes(
                encoding=Encoding.Raw,
                format=PublicFormat.Raw,
            )
            crv = "Ed25519" if isinstance(key, Ed25519PrivateKey) else "Ed448"
            return json.dumps(
                {
                    "x": base64url_encode(force_bytes(x)).decode(),
                    "d": base64url_encode(force_bytes(d)).decode(),
                    "kty": "OKP",
                    "crv": crv,
                }
            )
        raise InvalidKeyError("Not a public or private key")

    def from_jwk(jwk):
        # Accept either a JSON string or an already-parsed dict.
        try:
            if isinstance(jwk, str):
                obj = json.loads(jwk)
            elif isinstance(jwk, dict):
                obj = jwk
            else:
                raise ValueError
        except ValueError:
            raise InvalidKeyError("Key is not valid JSON")
        if obj.get("kty") != "OKP":
            raise InvalidKeyError("Not an Octet Key Pair")
        curve = obj.get("crv")
        if curve != "Ed25519" and curve != "Ed448":
            raise InvalidKeyError(f"Invalid curve: {curve}")
        if "x" not in obj:
            raise InvalidKeyError('OKP should have "x" parameter')
        x = base64url_decode(obj.get("x"))
        try:
            if "d" not in obj:
                if curve == "Ed25519":
                    return Ed25519PublicKey.from_public_bytes(x)
                return Ed448PublicKey.from_public_bytes(x)
            d = base64url_decode(obj.get("d"))
            if curve == "Ed25519":
                return Ed25519PrivateKey.from_private_bytes(d)
            return Ed448PrivateKey.from_private_bytes(d)
        except ValueError as err:
            raise InvalidKeyError("Invalid key parameter") from err
The provided code snippet includes necessary dependencies for implementing the `get_default_algorithms` function. Write a Python function `def get_default_algorithms()` to solve the following problem:
Returns the algorithms that are implemented by the library.
Here is the function:
def get_default_algorithms():
    """
    Returns the algorithms that are implemented by the library.
    """
    # Symmetric algorithms are always available.
    algorithms = {
        "none": NoneAlgorithm(),
        "HS256": HMACAlgorithm(HMACAlgorithm.SHA256),
        "HS384": HMACAlgorithm(HMACAlgorithm.SHA384),
        "HS512": HMACAlgorithm(HMACAlgorithm.SHA512),
    }
    if has_crypto:
        # Asymmetric algorithms require the `cryptography` package.
        algorithms["RS256"] = RSAAlgorithm(RSAAlgorithm.SHA256)
        algorithms["RS384"] = RSAAlgorithm(RSAAlgorithm.SHA384)
        algorithms["RS512"] = RSAAlgorithm(RSAAlgorithm.SHA512)
        algorithms["ES256"] = ECAlgorithm(ECAlgorithm.SHA256)
        algorithms["ES256K"] = ECAlgorithm(ECAlgorithm.SHA256)
        algorithms["ES384"] = ECAlgorithm(ECAlgorithm.SHA384)
        algorithms["ES521"] = ECAlgorithm(ECAlgorithm.SHA512)
        algorithms["ES512"] = ECAlgorithm(ECAlgorithm.SHA512)  # Backward compat for #219 fix
        algorithms["PS256"] = RSAPSSAlgorithm(RSAPSSAlgorithm.SHA256)
        algorithms["PS384"] = RSAPSSAlgorithm(RSAPSSAlgorithm.SHA384)
        algorithms["PS512"] = RSAPSSAlgorithm(RSAPSSAlgorithm.SHA512)
        algorithms["EdDSA"] = OKPAlgorithm()
    return algorithms
157,717 | import json
import platform
import sys
from . import __version__ as pyjwt_version
try:
import cryptography
except ModuleNotFoundError:
cryptography = None # type: ignore
The provided code snippet includes necessary dependencies for implementing the `info` function. Write a Python function `def info()` to solve the following problem:
Generate information for a bug report. Based on the requests package help utility module.
Here is the function:
def info():
    """
    Generate information for a bug report.
    Based on the requests package help utility module.
    """
    try:
        platform_info = {
            "system": platform.system(),
            "release": platform.release(),
        }
    except OSError:
        platform_info = {"system": "Unknown", "release": "Unknown"}

    implementation = platform.python_implementation()
    if implementation == "CPython":
        implementation_version = platform.python_version()
    elif implementation == "PyPy":
        pypy = sys.pypy_version_info
        implementation_version = "{}.{}.{}".format(
            pypy.major,
            pypy.minor,
            pypy.micro,
        )
        # Append alpha/beta/candidate tags for non-final PyPy releases.
        if pypy.releaselevel != "final":
            implementation_version = "".join(
                [implementation_version, pypy.releaselevel]
            )
    else:
        implementation_version = "Unknown"

    return {
        "platform": platform_info,
        "implementation": {
            "name": implementation,
            "version": implementation_version,
        },
        # Empty string when `cryptography` is not installed.
        "cryptography": {"version": getattr(cryptography, "__version__", "")},
        "pyjwt": {"version": pyjwt_version},
    }
157,718 | import os
import secrets
from pathlib import Path
from flask.cli import with_appcontext
from flask import current_app
import click
from flask_meld.templates import (
base_html_template,
config_template,
components,
components_template,
env_template,
index_html_template,
init_template,
requirements_template,
wsgi_template,
)
The provided code snippet includes necessary dependencies for implementing the `meld` function. Write a Python function `def meld()` to solve the following problem:
Flask-Meld specific commands
Here is the function:
def meld():
    """Flask-Meld specific commands"""
    # Intentionally empty: serves as the root of the `meld` CLI command
    # group (presumably registered via a click group decorator not visible
    # in this view — confirm upstream).
157,719 | import os
import secrets
from pathlib import Path
from flask.cli import with_appcontext
from flask import current_app
import click
from flask_meld.templates import (
base_html_template,
config_template,
components,
components_template,
env_template,
index_html_template,
init_template,
requirements_template,
wsgi_template,
)
The provided code snippet includes necessary dependencies for implementing the `new` function. Write a Python function `def new()` to solve the following problem:
Commands for new keyword
Here is the function:
def new():
    """Commands for new keyword"""
    # Intentionally empty: acts as the `new` subcommand group under which
    # scaffolding commands (e.g. `project`) are registered.
157,720 | import os
import secrets
from pathlib import Path
from flask.cli import with_appcontext
from flask import current_app
import click
from flask_meld.templates import (
base_html_template,
config_template,
components,
components_template,
env_template,
index_html_template,
init_template,
requirements_template,
wsgi_template,
)
def generate_meld_app(name, base_dir=None):
    """Scaffold a new Flask-Meld project tree under *base_dir* (or ./<name>).

    Creates the app/meld/templates/static/tests directory layout and writes
    the starter files from the bundled templates, including a fresh random
    SECRET_KEY in the generated .env file.
    """
    try:
        if not base_dir:
            base_dir = Path.cwd() / name
        os.makedirs(base_dir / "app" / "meld" / "components")
        os.makedirs(base_dir / "app" / "meld" / "templates")
        os.makedirs(base_dir / "app" / "templates")
        os.makedirs(base_dir / "app" / "static" / "images")
        os.makedirs(base_dir / "app" / "static" / "css")
        os.makedirs(base_dir / "tests")
        generate_file_with_content(
            base_dir, "requirements.txt", requirements_template.template
        )
        generate_file_with_content(base_dir, "config.py", config_template.template)
        generate_file_with_content(base_dir, "app/__init__.py", init_template.template)
        generate_file_with_content(base_dir, "app/wsgi.py", wsgi_template.template)
        generate_file_with_content(
            base_dir, "app/templates/base.html", base_html_template.template
        )
        generate_file_with_content(
            base_dir, "app/templates/index.html", index_html_template.template
        )
        # 32 hex chars of cryptographically secure randomness for SECRET_KEY.
        generated_secret_key = secrets.token_hex(16)
        generate_file_with_content(
            base_dir, ".env", env_template.substitute(secret_key=generated_secret_key)
        )
    except OSError:
        # NOTE(review): any OS failure (e.g. the target directory already
        # exists) is silently swallowed, so the CLI appears to succeed even
        # when nothing was created. Consider surfacing the error.
        pass
The provided code snippet includes necessary dependencies for implementing the `project` function. Write a Python function `def project(name)` to solve the following problem:
Create a new flask-meld app with application defaults
Here is the function:
def project(name):
    """Create a new flask-meld app with application defaults"""
    # Echo first for user feedback; scaffolding errors are currently
    # swallowed inside generate_meld_app.
    click.echo(f"Creating app {name}")
    generate_meld_app(name)
157,721 | import ast
from werkzeug.wrappers.response import Response
import functools
from .component import get_component_class
from flask import jsonify, current_app
import orjson
def parse_call_method_name(call_method_name: str):
    """Split a client call string like ``"add(1, 2)"`` into its parts.

    Returns ``(method_name, params)`` where *params* is ``None`` when no
    argument list is present, a Python list when the arguments parse as
    literals, or a list of stripped strings otherwise.
    """
    method_name, params = call_method_name, None
    if "(" in call_method_name and call_method_name.endswith(")"):
        paren = call_method_name.index("(")
        # Everything before the first "(" is the method name.
        method_name = call_method_name[:paren]
        inner = call_method_name[paren + 1 : -1]
        if inner:
            try:
                # Try to evaluate the arguments as Python literals.
                params = ast.literal_eval(f"[{inner}]")
            except (ValueError, SyntaxError):
                # Fall back to treating each comma-separated token as a string.
                params = [part.strip() for part in inner.split(",")]
    return method_name, params
def get_component_class(component_name):
    """
    Get a component class based on a component name.
    """
    # e.g. "MyWidget" -> module name "my_widget" -> class name "MyWidget".
    module_name = convert_to_snake_case(component_name)
    class_name = convert_to_camel_case(module_name)
    module = get_component_module(module_name)
    component_class = getattr(module, class_name)
    return component_class
def process_message(message):
    """Apply a queued set of client actions to a component and re-render it.

    Returns a dict with the component id, rendered DOM, serialized state,
    and an optional redirect when a called method returned a 302 response.
    """
    meld_id = message["id"]
    component_name = message["componentName"]
    action_queue = message["actionQueue"]
    data = message["data"]
    Component = get_component_class(component_name)
    component = Component(meld_id, **data)
    return_data = None
    # NOTE(review): branch nesting below was reconstructed from a source with
    # flattened indentation — confirm against upstream flask-meld.
    for action in action_queue:
        payload = action.get("payload", None)
        if "syncInput" in action["type"]:
            # Push the client-side input value onto the component attribute.
            if hasattr(component, payload["name"]):
                setattr(component, payload["name"], payload["value"])
                if component._form:
                    # Form-backed component: route the value through the
                    # matching form field so validation errors are captured.
                    field_name = payload.get("name")
                    if field_name in component._form._fields:
                        field = getattr(component._form, field_name)
                        component._set_field_data(field_name, payload["value"])
                        component.updated(field)
                        component.errors[field_name] = field.errors or ""
                else:
                    component.updated(payload["name"])
        elif "callMethod" in action["type"]:
            call_method_name = payload.get("name", "")
            method_name, params = parse_call_method_name(call_method_name)
            message = payload.get("message")
            if method_name is not None and hasattr(component, method_name):
                func = getattr(component, method_name)
                if params:
                    return_data = func(*params)
                elif message:
                    return_data = func(**message)
                else:
                    return_data = func()
    if component._form:
        component._bind_form(component._attributes())
    rendered_component = component.render(component_name)
    res = {
        "id": meld_id,
        "dom": rendered_component,
        "data": orjson.dumps(jsonify(component._attributes()).json).decode("utf-8"),
    }
    # A 302 redirect returned by a component method is forwarded to the client.
    if type(return_data) is Response and return_data.status_code == 302:
        res["redirect"] = {"url": return_data.location}
    return res
157,722 | import ast
from werkzeug.wrappers.response import Response
import functools
from .component import get_component_class
from flask import jsonify, current_app
import orjson
def get_component_class(component_name):
    """
    Get a component class based on a component name.
    """
    # e.g. "MyWidget" -> module name "my_widget" -> class name "MyWidget".
    module_name = convert_to_snake_case(component_name)
    class_name = convert_to_camel_case(module_name)
    module = get_component_module(module_name)
    component_class = getattr(module, class_name)
    return component_class
def process_init(component_name):
    # Resolve the component class and return its declared event listeners
    # (methods registered via the @listen decorator).
    Component = get_component_class(component_name)
    return Component._listeners()
157,723 | import ast
from werkzeug.wrappers.response import Response
import functools
from .component import get_component_class
from flask import jsonify, current_app
import orjson
The provided code snippet includes necessary dependencies for implementing the `listen` function. Write a Python function `def listen(*event_names: str)` to solve the following problem:
Decorator to indicate that the decorated method should listen for custom events. It can be called using `flask_meld.emit`. Keyword arguments from `flask_meld.emit` will be passed as keyword arguments to the decorated method. Params: *event_names (str): One or more event names to listen for.
Here is the function:
def listen(*event_names: str):
    """
    Decorator to indicate that the decorated method should listen for custom events.
    It can be called using `flask_meld.emit`. Keyword arguments from `flask_meld.emit`
    will be passed as keyword arguments to the decorated method.
    Params:
        *event_names (str): One or more event names to listen for.
    """

    def decorator(func):
        # Stash the event names on the function; meld discovers listeners
        # by inspecting this attribute.
        func._meld_event_names = event_names
        return func

    return decorator
157,724 | import ast
from werkzeug.wrappers.response import Response
import functools
from .component import get_component_class
from flask import jsonify, current_app
import orjson
The provided code snippet includes necessary dependencies for implementing the `emit` function. Write a Python function `def emit(event_name: str, **kwargs)` to solve the following problem:
Emit a custom event which will call any Component methods with the `@listen` decorator that are listening for the given event. Keyword arguments to this function are passed as keyword arguments to each of the decorated methods. Params: event_name (str): The name of the custom event to emit. **kwargs: Arguments to be passed as keyword arguments to the listening methods.
Here is the function:
def emit(event_name: str, **kwargs):
    """
    Emit a custom event which will call any Component methods with the `@listen`
    decorator that are listening for the given event. Keyword arguments to this
    function are passed as keyword arguments to each of the decorated methods.
    Params:
        event_name (str): The name of the custom event to emit.
        **kwargs: Arguments to be passed as keyword arguments to the listening
            methods.
    """
    # Broadcast over the app's Socket.IO instance; clients dispatch the
    # "meld-event" message back to listening components.
    current_app.socketio.emit("meld-event", {"event": event_name, "message": kwargs})
157,725 | import binascii
import codecs
import os
from io import BytesIO
from typing import Iterable, Mapping, Optional, Sequence, Tuple, Union
from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField
writer = codecs.lookup("utf-8")[3]
_TYPE_FIELDS = Union[
_TYPE_FIELDS_SEQUENCE,
Mapping[str, _TYPE_FIELD_VALUE_TUPLE],
]
def choose_boundary() -> str:
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    # 16 random bytes rendered as 32 lowercase hex characters.
    return os.urandom(16).hex()
def iter_field_objects(fields: _TYPE_FIELDS) -> Iterable[RequestField]:
    """
    Iterate over fields.
    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.
    """
    entries = fields.items() if isinstance(fields, Mapping) else fields
    for entry in entries:
        # RequestField instances pass through; (key, value) tuples are coerced.
        if isinstance(entry, RequestField):
            yield entry
        else:
            yield RequestField.from_tuples(*entry)
The provided code snippet includes necessary dependencies for implementing the `encode_multipart_formdata` function. Write a Python function `def encode_multipart_formdata( fields: _TYPE_FIELDS, boundary: Optional[str] = None ) -> Tuple[bytes, str]` to solve the following problem:
Encode a dictionary of ``fields`` using the multipart/form-data MIME format. :param fields: Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). :param boundary: If not specified, then a random boundary will be generated using :func:`urllib3.filepost.choose_boundary`.
Here is the function:
def encode_multipart_formdata(
    fields: _TYPE_FIELDS, boundary: Optional[str] = None
) -> Tuple[bytes, str]:
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`urllib3.filepost.choose_boundary`.
    """
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()
    for field in iter_field_objects(fields):
        body.write(f"--{boundary}\r\n".encode("latin-1"))
        # Headers are text: write them through the module-level utf-8
        # stream writer wrapped around the bytes buffer.
        writer(body).write(field.render_headers())
        data = field.data
        if isinstance(data, int):
            data = str(data)  # Backwards compatibility
        if isinstance(data, str):
            writer(body).write(data)
        else:
            body.write(data)
        body.write(b"\r\n")
    # Closing boundary marker terminates the multipart body.
    body.write(f"--{boundary}--\r\n".encode("latin-1"))
    content_type = f"multipart/form-data; boundary={boundary}"
    return body.getvalue(), content_type
157,726 | from collections import OrderedDict
from enum import Enum, auto
from threading import RLock
from typing import (
TYPE_CHECKING,
Callable,
Generic,
Iterable,
Iterator,
List,
Mapping,
MutableMapping,
NoReturn,
Optional,
)
from typing import OrderedDict as OrderedDictType
from typing import Set, Tuple, TypeVar, Union, cast, overload
ValidHTTPHeaderSource = Union[
"HTTPHeaderDict",
Mapping[str, str],
Iterable[Tuple[str, str]],
"HasGettableStringKeys",
]
class HTTPHeaderDict(MutableMapping[str, str]):
"""
:param headers:
An iterable of field-value pairs. Must not contain multiple field names
when compared case-insensitively.
:param kwargs:
Additional field-value pairs to pass in to ``dict.update``.
A ``dict`` like container for storing HTTP Headers.
Field names are stored and compared case-insensitively in compliance with
RFC 7230. Iteration provides the first case-sensitive key seen for each
case-insensitive pair.
Using ``__setitem__`` syntax overwrites fields that compare equal
case-insensitively in order to maintain ``dict``'s api. For fields that
compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
in a loop.
If multiple fields that are equal case-insensitively are passed to the
constructor or ``.update``, the behavior is undefined and some will be
lost.
>>> headers = HTTPHeaderDict()
>>> headers.add('Set-Cookie', 'foo=bar')
>>> headers.add('set-cookie', 'baz=quxx')
>>> headers['content-length'] = '7'
>>> headers['SET-cookie']
'foo=bar, baz=quxx'
>>> headers['Content-Length']
'7'
"""
_container: MutableMapping[str, List[str]]
def __init__(self, headers: Optional[ValidHTTPHeaderSource] = None, **kwargs: str):
super().__init__()
self._container = {} # 'dict' is insert-ordered in Python 3.7+
if headers is not None:
if isinstance(headers, HTTPHeaderDict):
self._copy_from(headers)
else:
self.extend(headers)
if kwargs:
self.extend(kwargs)
def __setitem__(self, key: str, val: str) -> None:
# avoid a bytes/str comparison by decoding before httplib
if isinstance(key, bytes):
key = key.decode("latin-1")
self._container[key.lower()] = [key, val]
def __getitem__(self, key: str) -> str:
val = self._container[key.lower()]
return ", ".join(val[1:])
def __delitem__(self, key: str) -> None:
del self._container[key.lower()]
def __contains__(self, key: object) -> bool:
if isinstance(key, str):
return key.lower() in self._container
return False
def __eq__(self, other: object) -> bool:
maybe_constructable = ensure_can_construct_http_header_dict(other)
if maybe_constructable is None:
return False
else:
other_as_http_header_dict = type(self)(maybe_constructable)
return {k.lower(): v for k, v in self.itermerged()} == {
k.lower(): v for k, v in other_as_http_header_dict.itermerged()
}
def __ne__(self, other: object) -> bool:
return not self.__eq__(other)
def __len__(self) -> int:
return len(self._container)
def __iter__(self) -> Iterator[str]:
# Only provide the originally cased names
for vals in self._container.values():
yield vals[0]
def discard(self, key: str) -> None:
try:
del self[key]
except KeyError:
pass
def add(self, key: str, val: str) -> None:
    """Append *val* for *key*, keeping any values that are already stored.

    >>> headers = HTTPHeaderDict(foo='bar')
    >>> headers.add('Foo', 'baz')
    >>> headers['foo']
    'bar, baz'
    """
    # httplib may hand us bytes; normalize to str before lowering/storing.
    if isinstance(key, bytes):
        key = key.decode("latin-1")
    entry = [key, val]
    # setdefault keeps the common no-entry-yet case to a single dict operation.
    existing = self._container.setdefault(key.lower(), entry)
    if existing is not entry:
        # A previous entry won the setdefault race; append to it instead.
        existing.append(val)
def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None:
    """Add headers from any header-like object plus keyword arguments.

    Mirrors MutableMapping.update, except entries are inserted via
    :meth:`add` so duplicate field names accumulate rather than overwrite.
    """
    if len(args) > 1:
        raise TypeError(
            f"extend() takes at most 1 positional arguments ({len(args)} given)"
        )
    source = args[0] if args else ()
    if isinstance(source, HTTPHeaderDict):
        for key, val in source.iteritems():
            self.add(key, val)
    elif isinstance(source, Mapping):
        for key, val in source.items():
            self.add(key, val)
    elif isinstance(source, Iterable):
        source = cast(Iterable[Tuple[str, str]], source)
        for key, val in source:
            self.add(key, val)
    elif hasattr(source, "keys") and hasattr(source, "__getitem__"):
        # THIS IS NOT A TYPESAFE BRANCH: the object merely looks Mapping-like.
        # We tolerate it the same way dict.update() tolerates "anything with
        # keys() and __getitem__", without any runtime guarantees.
        for key in source.keys():
            self.add(key, source[key])
    for key, val in kwargs.items():
        self.add(key, val)
# NOTE(review): the next two stubs look like typing @overload signatures whose
# decorators were lost during extraction -- confirm against the original
# source before treating them as plain redefinitions.
def getlist(self, key: str) -> List[str]:
    ...
def getlist(self, key: str, default: _DT) -> Union[List[str], _DT]:
    ...
def getlist(
    self, key: str, default: Union[_Sentinel, _DT] = _Sentinel.not_passed
) -> Union[List[str], _DT]:
    """Returns a list of all the values for the named field. Returns an
    empty list if the key doesn't exist."""
    try:
        vals = self._container[key.lower()]
    except KeyError:
        if default is _Sentinel.not_passed:
            # _DT is unbound; empty list is instance of List[str]
            return []
        # _DT is bound; default is instance of _DT
        return default
    else:
        # _DT may or may not be bound; vals[1:] is instance of List[str], which
        # meets our external interface requirement of `Union[List[str], _DT]`.
        # (vals[0] is the original-cased name, not a value.)
        return vals[1:]
# Backwards compatibility for httplib
getheaders = getlist
getallmatchingheaders = getlist
iget = getlist
# Backwards compatibility for http.cookiejar
get_all = getlist
def __repr__(self) -> str:
    """Debug form, e.g. ``HTTPHeaderDict({'Set-Cookie': 'a=1, b=2'})`` (values merged)."""
    return f"{type(self).__name__}({dict(self.itermerged())})"
def _copy_from(self, other: "HTTPHeaderDict") -> None:
    """Copy every field of *other* into self, keeping casing and duplicate values."""
    for name in other:
        # Entry layout matches __setitem__: [original-cased name, value, ...].
        self._container[name.lower()] = [name, *other.getlist(name)]
def copy(self) -> "HTTPHeaderDict":
    """Return a new instance of the same (sub)class holding the same fields."""
    duplicate = type(self)()
    duplicate._copy_from(self)
    return duplicate
def iteritems(self) -> Iterator[Tuple[str, str]]:
    """Yield one (name, value) pair per stored header line, duplicates included."""
    for name in self:
        stored = self._container[name.lower()]
        original_name = stored[0]
        for value in stored[1:]:
            yield original_name, value
def itermerged(self) -> Iterator[Tuple[str, str]]:
    """Yield one (name, comma-joined values) pair per distinct header name."""
    for name in self:
        stored = self._container[name.lower()]
        yield stored[0], ", ".join(stored[1:])
def items(self) -> HTTPHeaderDictItemView:
    # NOTE(review): HTTPHeaderDictItemView is defined elsewhere in this module;
    # presumably a set-like view over (name, value) pairs -- confirm there.
    return HTTPHeaderDictItemView(self)
def _has_value_for_header(self, header_name: str, potential_value: str) -> bool:
    """True when *potential_value* is one of the stored values for *header_name*."""
    if header_name not in self:
        return False
    # Skip element 0, which holds the original-cased name rather than a value.
    return potential_value in self._container[header_name.lower()][1:]
def ensure_can_construct_http_header_dict(
    potential: object,
) -> Optional[ValidHTTPHeaderSource]:
    """Narrow *potential* to something HTTPHeaderDict can be built from, else None."""
    if isinstance(potential, HTTPHeaderDict):
        return potential
    if isinstance(potential, Mapping):
        # Fully checking a Mapping's contents at runtime is expensive; for
        # typechecking purposes assume any Mapping has the right shape.
        return cast(Mapping[str, str], potential)
    if isinstance(potential, Iterable):
        # Same reasoning as for Mapping: assume the element shape is right.
        return cast(Iterable[Tuple[str, str]], potential)
    if hasattr(potential, "keys") and hasattr(potential, "__getitem__"):
        # Duck-typed last resort: merely looks Mapping-ish.
        return cast("HasGettableStringKeys", potential)
    return None
157,727 | import platform
from ctypes import (
CDLL,
CFUNCTYPE,
POINTER,
c_bool,
c_byte,
c_char_p,
c_int32,
c_long,
c_size_t,
c_uint32,
c_ulong,
c_void_p,
)
from ctypes.util import find_library
from typing import Optional
# Parse the macOS version string (e.g. "10.15.7") into a comparable int tuple.
# NOTE(review): ``version`` is defined above this excerpt -- presumably
# ``platform.mac_ver()[0]``; confirm against the full file.
version_info = tuple(map(int, version.split(".")))
# SecureTransport support requires OS X 10.8+; refuse to import on older systems.
if version_info < (10, 8):
    raise OSError(
        f"Only OS X 10.8 and newer are supported, not {version_info[0]}.{version_info[1]}"
    )
The provided code snippet includes necessary dependencies for implementing the `load_cdll` function. Write a Python function `def load_cdll(name: str, macos10_16_path: str) -> CDLL` to solve the following problem:
Loads a CDLL by name, falling back to known path on 10.16+
Here is the function:
def load_cdll(name: str, macos10_16_path: str) -> CDLL:
    """Loads a CDLL by name, falling back to known path on 10.16+"""
    try:
        # Big Sur is technically 11, but its beta identified itself as 10.16,
        # so the (10, 16) comparison deliberately covers Big Sur and later,
        # where find_library() cannot locate system frameworks.
        if version_info >= (10, 16):
            resolved: Optional[str] = macos10_16_path
        else:
            resolved = find_library(name)
        if not resolved:
            raise OSError  # Caught below and reraised as ImportError
        return CDLL(resolved, use_errno=True)
    except OSError:
        raise ImportError(f"The library {name} failed to load") from None
157,728 | import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from typing import Any, List, Optional, Tuple, Type
from .bindings import ( # type: ignore[attr-defined]
CFArray,
CFConst,
CFData,
CFDictionary,
CFMutableArray,
CFString,
CFTypeRef,
CoreFoundation,
SecKeychainRef,
Security,
)
CoreFoundation = load_cdll(
"CoreFoundation",
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)
CFDictionary = c_void_p
CFTypeRef = POINTER(CFType)
The provided code snippet includes necessary dependencies for implementing the `_cf_dictionary_from_tuples` function. Write a Python function `def _cf_dictionary_from_tuples(tuples: List[Tuple[Any, Any]]) -> CFDictionary` to solve the following problem:
Given a list of Python tuples, create an associated CFDictionary.
Here is the function:
def _cf_dictionary_from_tuples(tuples: List[Tuple[Any, Any]]) -> CFDictionary:
    """
    Given a list of Python tuples, create an associated CFDictionary.
    """
    size = len(tuples)
    # CFDictionaryCreate takes parallel C arrays of keys and values, so build
    # both from the tuple list in matching order.
    key_array = (CoreFoundation.CFTypeRef * size)(*(pair[0] for pair in tuples))
    value_array = (CoreFoundation.CFTypeRef * size)(*(pair[1] for pair in tuples))
    return CoreFoundation.CFDictionaryCreate(
        CoreFoundation.kCFAllocatorDefault,
        key_array,
        value_array,
        size,
        CoreFoundation.kCFTypeDictionaryKeyCallBacks,
        CoreFoundation.kCFTypeDictionaryValueCallBacks,
    )
157,729 | import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from typing import Any, List, Optional, Tuple, Type
from .bindings import ( # type: ignore[attr-defined]
CFArray,
CFConst,
CFData,
CFDictionary,
CFMutableArray,
CFString,
CFTypeRef,
CoreFoundation,
SecKeychainRef,
Security,
)
def _cfstr(py_bstr: bytes) -> CFString:
    """
    Create a CFString from Python binary data.
    The returned string must be CFReleased by the caller.
    """
    return CoreFoundation.CFStringCreateWithCString(
        CoreFoundation.kCFAllocatorDefault,
        ctypes.c_char_p(py_bstr),
        CFConst.kCFStringEncodingUTF8,
    )
CoreFoundation = load_cdll(
"CoreFoundation",
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)
CFMutableArray = c_void_p
The provided code snippet includes necessary dependencies for implementing the `_create_cfstring_array` function. Write a Python function `def _create_cfstring_array(lst: List[bytes]) -> CFMutableArray` to solve the following problem:
Given a list of Python binary data, create an associated CFMutableArray. The array must be CFReleased by the caller. Raises an ssl.SSLError on failure.
Here is the function:
def _create_cfstring_array(lst: List[bytes]) -> CFMutableArray:
    """
    Given a list of Python binary data, create an associated CFMutableArray.
    The array must be CFReleased by the caller.
    Raises an ssl.SSLError on failure.
    """
    cf_arr = None
    try:
        # Empty mutable array whose members are retained/released by the
        # array itself (kCFTypeArrayCallBacks).
        cf_arr = CoreFoundation.CFArrayCreateMutable(
            CoreFoundation.kCFAllocatorDefault,
            0,
            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
        )
        if not cf_arr:
            raise MemoryError("Unable to allocate memory!")
        for item in lst:
            cf_str = _cfstr(item)
            if not cf_str:
                raise MemoryError("Unable to allocate memory!")
            try:
                # Appending retains cf_str, so our own reference is released
                # immediately afterwards (finally runs even if append fails).
                CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
            finally:
                CoreFoundation.CFRelease(cf_str)
    except BaseException as e:
        # On any failure, release the partially built array before reporting.
        # On success the caller owns it and must CFRelease it.
        if cf_arr:
            CoreFoundation.CFRelease(cf_arr)
        raise ssl.SSLError(f"Unable to allocate array: {e}") from None
    return cf_arr
157,730 | import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from typing import Any, List, Optional, Tuple, Type
from .bindings import ( # type: ignore[attr-defined]
CFArray,
CFConst,
CFData,
CFDictionary,
CFMutableArray,
CFString,
CFTypeRef,
CoreFoundation,
SecKeychainRef,
Security,
)
_PEM_CERTS_RE = re.compile(
b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
)
def _cf_data_from_bytes(bytestring: bytes) -> CFData:
    """
    Given a bytestring, create a CFData object from it. This CFData object must
    be CFReleased by the caller.
    """
    # CFDataCreate copies the buffer, so `bytestring` need not outlive the call.
    return CoreFoundation.CFDataCreate(
        CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
    )
Security = load_cdll(
"Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
"CoreFoundation",
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)
try:
Security.SecItemImport.argtypes = [
CFDataRef,
CFStringRef,
POINTER(SecExternalFormat),
POINTER(SecExternalItemType),
SecItemImportExportFlags,
POINTER(SecItemImportExportKeyParameters),
SecKeychainRef,
POINTER(CFArrayRef),
]
Security.SecItemImport.restype = OSStatus
Security.SecCertificateGetTypeID.argtypes = []
Security.SecCertificateGetTypeID.restype = CFTypeID
Security.SecIdentityGetTypeID.argtypes = []
Security.SecIdentityGetTypeID.restype = CFTypeID
Security.SecKeyGetTypeID.argtypes = []
Security.SecKeyGetTypeID.restype = CFTypeID
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
Security.SecCertificateCreateWithData.restype = SecCertificateRef
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
Security.SecCertificateCopyData.restype = CFDataRef
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SecIdentityCreateWithCertificate.argtypes = [
CFTypeRef,
SecCertificateRef,
POINTER(SecIdentityRef),
]
Security.SecIdentityCreateWithCertificate.restype = OSStatus
Security.SecKeychainCreate.argtypes = [
c_char_p,
c_uint32,
c_void_p,
Boolean,
c_void_p,
POINTER(SecKeychainRef),
]
Security.SecKeychainCreate.restype = OSStatus
Security.SecKeychainDelete.argtypes = [SecKeychainRef]
Security.SecKeychainDelete.restype = OSStatus
Security.SecPKCS12Import.argtypes = [
CFDataRef,
CFDictionaryRef,
POINTER(CFArrayRef),
]
Security.SecPKCS12Import.restype = OSStatus
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
SSLWriteFunc = CFUNCTYPE(
OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
)
Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
Security.SSLSetIOFuncs.restype = OSStatus
Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerID.restype = OSStatus
Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetCertificate.restype = OSStatus
Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
Security.SSLSetCertificateAuthorities.restype = OSStatus
Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
Security.SSLSetConnection.restype = OSStatus
Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerDomainName.restype = OSStatus
Security.SSLHandshake.argtypes = [SSLContextRef]
Security.SSLHandshake.restype = OSStatus
Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLRead.restype = OSStatus
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLWrite.restype = OSStatus
Security.SSLClose.argtypes = [SSLContextRef]
Security.SSLClose.restype = OSStatus
Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
Security.SSLGetSupportedCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetSupportedCiphers.restype = OSStatus
Security.SSLSetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
c_size_t,
]
Security.SSLSetEnabledCiphers.restype = OSStatus
Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
Security.SSLGetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetEnabledCiphers.restype = OSStatus
Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
Security.SSLGetNegotiatedCipher.restype = OSStatus
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
SSLContextRef,
POINTER(SSLProtocol),
]
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
Security.SSLCopyPeerTrust.restype = OSStatus
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
Security.SecTrustSetAnchorCertificates.restype = OSStatus
Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean]
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
Security.SecTrustEvaluate.restype = OSStatus
Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
Security.SecTrustGetCertificateCount.restype = CFIndex
Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
Security.SSLCreateContext.argtypes = [
CFAllocatorRef,
SSLProtocolSide,
SSLConnectionType,
]
Security.SSLCreateContext.restype = SSLContextRef
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
Security.SSLSetSessionOption.restype = OSStatus
Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMin.restype = OSStatus
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
try:
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetALPNProtocols.restype = OSStatus
except AttributeError:
# Supported only in 10.12+
pass
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SSLReadFunc = SSLReadFunc
Security.SSLWriteFunc = SSLWriteFunc
Security.SSLContextRef = SSLContextRef
Security.SSLProtocol = SSLProtocol
Security.SSLCipherSuite = SSLCipherSuite
Security.SecIdentityRef = SecIdentityRef
Security.SecKeychainRef = SecKeychainRef
Security.SecTrustRef = SecTrustRef
Security.SecTrustResultType = SecTrustResultType
Security.SecExternalFormat = SecExternalFormat
Security.OSStatus = OSStatus
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
Security, "kSecImportExportPassphrase"
)
Security.kSecImportItemIdentity = CFStringRef.in_dll(
Security, "kSecImportItemIdentity"
)
# CoreFoundation time!
CoreFoundation.CFRetain.argtypes = [CFTypeRef]
CoreFoundation.CFRetain.restype = CFTypeRef
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
CoreFoundation.CFRelease.restype = None
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
CoreFoundation.CFGetTypeID.restype = CFTypeID
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
CFStringEncoding,
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
CFStringEncoding,
]
CoreFoundation.CFStringGetCString.restype = c_bool
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
CoreFoundation.CFDataCreate.restype = CFDataRef
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
CoreFoundation.CFDataGetLength.restype = CFIndex
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDictionaryCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
POINTER(CFTypeRef),
CFIndex,
CFDictionaryKeyCallBacks,
CFDictionaryValueCallBacks,
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
CoreFoundation.CFArrayCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreate.restype = CFArrayRef
CoreFoundation.CFArrayCreateMutable.argtypes = [
CFAllocatorRef,
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
CoreFoundation.CFArrayAppendValue.restype = None
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
CoreFoundation.CFArrayGetCount.restype = CFIndex
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
CoreFoundation, "kCFAllocatorDefault"
)
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeArrayCallBacks"
)
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
)
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryValueCallBacks"
)
CoreFoundation.CFTypeRef = CFTypeRef
CoreFoundation.CFArrayRef = CFArrayRef
CoreFoundation.CFStringRef = CFStringRef
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
raise ImportError("Error initializing ctypes") from None
The provided code snippet includes necessary dependencies for implementing the `_cert_array_from_pem` function. Write a Python function `def _cert_array_from_pem(pem_bundle: bytes) -> CFArray` to solve the following problem:
Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain.
Here is the function:
def _cert_array_from_pem(pem_bundle: bytes) -> CFArray:
    """
    Given a bundle of certs in PEM format, turns them into a CFArray of certs
    that can be used to validate a cert chain.

    The returned array must be CFReleased by the caller.
    Raises ssl.SSLError if the bundle contains no certificates or if any
    CoreFoundation/Security allocation fails.
    """
    # Normalize the PEM bundle's line endings.
    pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
    # Extract each base64 certificate body and decode it into DER bytes.
    der_certs = [
        base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
    ]
    if not der_certs:
        raise ssl.SSLError("No root certificates specified")
    cert_array = CoreFoundation.CFArrayCreateMutable(
        CoreFoundation.kCFAllocatorDefault,
        0,
        ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
    )
    if not cert_array:
        raise ssl.SSLError("Unable to allocate memory!")
    try:
        for der_bytes in der_certs:
            certdata = _cf_data_from_bytes(der_bytes)
            if not certdata:
                raise ssl.SSLError("Unable to allocate memory!")
            cert = Security.SecCertificateCreateWithData(
                CoreFoundation.kCFAllocatorDefault, certdata
            )
            # The cert object (if created) holds its own data; drop ours now.
            CoreFoundation.CFRelease(certdata)
            if not cert:
                raise ssl.SSLError("Unable to build cert object!")
            CoreFoundation.CFArrayAppendValue(cert_array, cert)
            # The array retains the cert, so release our reference.
            CoreFoundation.CFRelease(cert)
    except Exception:
        # We need to free the array before the exception bubbles further.
        # We only want to do that if an error occurs: otherwise, the caller
        # should free.
        CoreFoundation.CFRelease(cert_array)
        raise
    return cert_array
157,731 | import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from typing import Any, List, Optional, Tuple, Type
from .bindings import ( # type: ignore[attr-defined]
CFArray,
CFConst,
CFData,
CFDictionary,
CFMutableArray,
CFString,
CFTypeRef,
CoreFoundation,
SecKeychainRef,
Security,
)
def _assert_no_error(
    error: int, exception_class: Optional[Type[BaseException]] = None
) -> None:
    """
    Raise an exception (ssl.SSLError unless *exception_class* is given)
    describing the OSStatus *error*; a zero status is success and a no-op.
    """
    if error == 0:
        return
    # Ask Security.framework for a human-readable message for this status,
    # remembering to release the CFString it hands back.
    cf_error_string = Security.SecCopyErrorMessageString(error, None)
    output = _cf_string_to_unicode(cf_error_string)
    CoreFoundation.CFRelease(cf_error_string)
    if not output:
        # No message available; fall back to the raw status code.
        output = f"OSStatus {error}"
    raise (exception_class or ssl.SSLError)(output)
Security = load_cdll(
"Security", "/System/Library/Frameworks/Security.framework/Security"
)
SecKeychainRef = POINTER(c_void_p)
try:
Security.SecItemImport.argtypes = [
CFDataRef,
CFStringRef,
POINTER(SecExternalFormat),
POINTER(SecExternalItemType),
SecItemImportExportFlags,
POINTER(SecItemImportExportKeyParameters),
SecKeychainRef,
POINTER(CFArrayRef),
]
Security.SecItemImport.restype = OSStatus
Security.SecCertificateGetTypeID.argtypes = []
Security.SecCertificateGetTypeID.restype = CFTypeID
Security.SecIdentityGetTypeID.argtypes = []
Security.SecIdentityGetTypeID.restype = CFTypeID
Security.SecKeyGetTypeID.argtypes = []
Security.SecKeyGetTypeID.restype = CFTypeID
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
Security.SecCertificateCreateWithData.restype = SecCertificateRef
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
Security.SecCertificateCopyData.restype = CFDataRef
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SecIdentityCreateWithCertificate.argtypes = [
CFTypeRef,
SecCertificateRef,
POINTER(SecIdentityRef),
]
Security.SecIdentityCreateWithCertificate.restype = OSStatus
Security.SecKeychainCreate.argtypes = [
c_char_p,
c_uint32,
c_void_p,
Boolean,
c_void_p,
POINTER(SecKeychainRef),
]
Security.SecKeychainCreate.restype = OSStatus
Security.SecKeychainDelete.argtypes = [SecKeychainRef]
Security.SecKeychainDelete.restype = OSStatus
Security.SecPKCS12Import.argtypes = [
CFDataRef,
CFDictionaryRef,
POINTER(CFArrayRef),
]
Security.SecPKCS12Import.restype = OSStatus
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
SSLWriteFunc = CFUNCTYPE(
OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
)
Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
Security.SSLSetIOFuncs.restype = OSStatus
Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerID.restype = OSStatus
Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetCertificate.restype = OSStatus
Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
Security.SSLSetCertificateAuthorities.restype = OSStatus
Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
Security.SSLSetConnection.restype = OSStatus
Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerDomainName.restype = OSStatus
Security.SSLHandshake.argtypes = [SSLContextRef]
Security.SSLHandshake.restype = OSStatus
Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLRead.restype = OSStatus
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLWrite.restype = OSStatus
Security.SSLClose.argtypes = [SSLContextRef]
Security.SSLClose.restype = OSStatus
Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
Security.SSLGetSupportedCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetSupportedCiphers.restype = OSStatus
Security.SSLSetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
c_size_t,
]
Security.SSLSetEnabledCiphers.restype = OSStatus
Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
Security.SSLGetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetEnabledCiphers.restype = OSStatus
Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
Security.SSLGetNegotiatedCipher.restype = OSStatus
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
SSLContextRef,
POINTER(SSLProtocol),
]
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
Security.SSLCopyPeerTrust.restype = OSStatus
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
Security.SecTrustSetAnchorCertificates.restype = OSStatus
Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean]
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
Security.SecTrustEvaluate.restype = OSStatus
Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
Security.SecTrustGetCertificateCount.restype = CFIndex
Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
Security.SSLCreateContext.argtypes = [
CFAllocatorRef,
SSLProtocolSide,
SSLConnectionType,
]
Security.SSLCreateContext.restype = SSLContextRef
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
Security.SSLSetSessionOption.restype = OSStatus
Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMin.restype = OSStatus
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
try:
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetALPNProtocols.restype = OSStatus
except AttributeError:
# Supported only in 10.12+
pass
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SSLReadFunc = SSLReadFunc
Security.SSLWriteFunc = SSLWriteFunc
Security.SSLContextRef = SSLContextRef
Security.SSLProtocol = SSLProtocol
Security.SSLCipherSuite = SSLCipherSuite
Security.SecIdentityRef = SecIdentityRef
Security.SecKeychainRef = SecKeychainRef
Security.SecTrustRef = SecTrustRef
Security.SecTrustResultType = SecTrustResultType
Security.SecExternalFormat = SecExternalFormat
Security.OSStatus = OSStatus
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
Security, "kSecImportExportPassphrase"
)
Security.kSecImportItemIdentity = CFStringRef.in_dll(
Security, "kSecImportItemIdentity"
)
# CoreFoundation time!
CoreFoundation.CFRetain.argtypes = [CFTypeRef]
CoreFoundation.CFRetain.restype = CFTypeRef
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
CoreFoundation.CFRelease.restype = None
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
CoreFoundation.CFGetTypeID.restype = CFTypeID
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
CFStringEncoding,
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
CFStringEncoding,
]
CoreFoundation.CFStringGetCString.restype = c_bool
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
CoreFoundation.CFDataCreate.restype = CFDataRef
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
CoreFoundation.CFDataGetLength.restype = CFIndex
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDictionaryCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
POINTER(CFTypeRef),
CFIndex,
CFDictionaryKeyCallBacks,
CFDictionaryValueCallBacks,
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
CoreFoundation.CFArrayCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreate.restype = CFArrayRef
CoreFoundation.CFArrayCreateMutable.argtypes = [
CFAllocatorRef,
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
CoreFoundation.CFArrayAppendValue.restype = None
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
CoreFoundation.CFArrayGetCount.restype = CFIndex
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
CoreFoundation, "kCFAllocatorDefault"
)
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeArrayCallBacks"
)
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
)
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryValueCallBacks"
)
CoreFoundation.CFTypeRef = CFTypeRef
CoreFoundation.CFArrayRef = CFArrayRef
CoreFoundation.CFStringRef = CFStringRef
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
raise ImportError("Error initializing ctypes") from None
The provided code snippet includes necessary dependencies for implementing the `_temporary_keychain` function. Write a Python function `def _temporary_keychain() -> Tuple[SecKeychainRef, str]` to solve the following problem:
This function creates a temporary Mac keychain that we can use to work with credentials. This keychain uses a one-time password and a temporary file to store the data. We expect to have one keychain per socket. The returned SecKeychainRef must be freed by the caller, including calling SecKeychainDelete. Returns a tuple of the SecKeychainRef and the path to the temporary directory that contains it.
Here is the function:
def _temporary_keychain() -> Tuple[SecKeychainRef, str]:
    """
    Create a throwaway macOS keychain for holding connection credentials.

    SecKeychainCreate insists on a filesystem path, so a fresh temporary
    directory is created to host the keychain file, and the keychain is
    protected with a random one-shot password. We expect one keychain per
    socket. The caller owns the returned SecKeychainRef and must free it,
    including calling SecKeychainDelete.

    Returns a tuple of (SecKeychainRef, path of the temporary directory
    containing the keychain).
    """
    # Draw 40 random bytes in one go: the first 8 become the keychain's
    # file name and the remaining 32 its password. Base16 (hex) encoding
    # keeps both values printable and valid UTF-8, which the password
    # must be.
    entropy = os.urandom(40)
    kc_filename = base64.b16encode(entropy[:8]).decode("utf-8")
    kc_password = base64.b16encode(entropy[8:])  # must be valid UTF-8
    tempdir = tempfile.mkdtemp()
    kc_path = os.path.join(tempdir, kc_filename).encode("utf-8")

    # Create the keychain itself and hand ownership to the caller.
    keychain = Security.SecKeychainRef()
    status = Security.SecKeychainCreate(
        kc_path, len(kc_password), kc_password, False, None, ctypes.byref(keychain)
    )
    _assert_no_error(status)
    return keychain, tempdir
157,732 | import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from typing import Any, List, Optional, Tuple, Type
from .bindings import ( # type: ignore[attr-defined]
CFArray,
CFConst,
CFData,
CFDictionary,
CFMutableArray,
CFString,
CFTypeRef,
CoreFoundation,
SecKeychainRef,
Security,
)
def _assert_no_error(
error: int, exception_class: Optional[Type[BaseException]] = None
) -> None:
"""
Checks the return code and throws an exception if there is an error to
report
"""
if error == 0:
return
cf_error_string = Security.SecCopyErrorMessageString(error, None)
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
if output is None or output == "":
output = f"OSStatus {error}"
if exception_class is None:
exception_class = ssl.SSLError
raise exception_class(output)
def _load_items_from_file(
    keychain: SecKeychainRef, path: str
) -> Tuple[List[CFTypeRef], List[CFTypeRef]]:
    """
    Given a single file, loads all the trust objects from it into arrays and
    the keychain.

    Returns a tuple of lists: the first list is a list of identities, the
    second a list of certs.

    Ownership note: every CFTypeRef in the returned lists has been
    CFRetain'd, so the caller is responsible for CFRelease'ing each item.
    """
    certificates = []
    identities = []
    result_array = None

    # Read the whole file up front: SecItemImport consumes a CFData blob.
    with open(path, "rb") as f:
        raw_filedata = f.read()
    try:
        filedata = CoreFoundation.CFDataCreate(
            CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
        )
        result_array = CoreFoundation.CFArrayRef()
        result = Security.SecItemImport(
            filedata,  # cert data
            None,  # Filename, leaving it out for now
            None,  # What the type of the file is, we don't care
            None,  # what's in the file, we don't care
            0,  # import flags
            None,  # key params, can include passphrase in the future
            keychain,  # The keychain to insert into
            ctypes.byref(result_array),  # Results
        )
        _assert_no_error(result)

        # A CFArray is not very useful to us as an intermediary
        # representation, so we are going to extract the objects we want
        # and then free the array. We don't need to keep hold of keys: the
        # keychain already has them!
        result_count = CoreFoundation.CFArrayGetCount(result_array)
        for index in range(result_count):
            item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
            item = ctypes.cast(item, CoreFoundation.CFTypeRef)

            # Retain before stashing: the finally block below releases the
            # array that currently owns these objects.
            if _is_cert(item):
                CoreFoundation.CFRetain(item)
                certificates.append(item)
            elif _is_identity(item):
                CoreFoundation.CFRetain(item)
                identities.append(item)
    finally:
        if result_array:
            CoreFoundation.CFRelease(result_array)
        # NOTE(review): if CFDataCreate itself raised, ``filedata`` would be
        # unbound here and this CFRelease would NameError; in practice
        # CFDataCreate does not raise — confirm.
        CoreFoundation.CFRelease(filedata)
    return (identities, certificates)
# Load the two frameworks we need and declare ctypes prototypes
# (argtypes/restype) for every Security / CoreFoundation symbol this module
# uses. Declaring prototypes up front lets ctypes type-check each call; if
# any symbol is missing we fail loudly with ImportError below.
Security = load_cdll(
    "Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
    "CoreFoundation",
    "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)

SecKeychainRef = POINTER(c_void_p)

try:
    Security.SecItemImport.argtypes = [
        CFDataRef,
        CFStringRef,
        POINTER(SecExternalFormat),
        POINTER(SecExternalItemType),
        SecItemImportExportFlags,
        POINTER(SecItemImportExportKeyParameters),
        SecKeychainRef,
        POINTER(CFArrayRef),
    ]
    Security.SecItemImport.restype = OSStatus
    Security.SecCertificateGetTypeID.argtypes = []
    Security.SecCertificateGetTypeID.restype = CFTypeID
    Security.SecIdentityGetTypeID.argtypes = []
    Security.SecIdentityGetTypeID.restype = CFTypeID
    Security.SecKeyGetTypeID.argtypes = []
    Security.SecKeyGetTypeID.restype = CFTypeID
    Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
    Security.SecCertificateCreateWithData.restype = SecCertificateRef
    Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
    Security.SecCertificateCopyData.restype = CFDataRef
    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
    Security.SecCopyErrorMessageString.restype = CFStringRef
    Security.SecIdentityCreateWithCertificate.argtypes = [
        CFTypeRef,
        SecCertificateRef,
        POINTER(SecIdentityRef),
    ]
    Security.SecIdentityCreateWithCertificate.restype = OSStatus
    Security.SecKeychainCreate.argtypes = [
        c_char_p,
        c_uint32,
        c_void_p,
        Boolean,
        c_void_p,
        POINTER(SecKeychainRef),
    ]
    Security.SecKeychainCreate.restype = OSStatus
    Security.SecKeychainDelete.argtypes = [SecKeychainRef]
    Security.SecKeychainDelete.restype = OSStatus
    Security.SecPKCS12Import.argtypes = [
        CFDataRef,
        CFDictionaryRef,
        POINTER(CFArrayRef),
    ]
    Security.SecPKCS12Import.restype = OSStatus

    # Callback signatures SecureTransport uses to drive our socket I/O.
    SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
    SSLWriteFunc = CFUNCTYPE(
        OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
    )

    Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
    Security.SSLSetIOFuncs.restype = OSStatus
    Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
    Security.SSLSetPeerID.restype = OSStatus
    Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
    Security.SSLSetCertificate.restype = OSStatus
    Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
    Security.SSLSetCertificateAuthorities.restype = OSStatus
    Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
    Security.SSLSetConnection.restype = OSStatus
    Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
    Security.SSLSetPeerDomainName.restype = OSStatus
    Security.SSLHandshake.argtypes = [SSLContextRef]
    Security.SSLHandshake.restype = OSStatus
    Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
    Security.SSLRead.restype = OSStatus
    Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
    Security.SSLWrite.restype = OSStatus
    Security.SSLClose.argtypes = [SSLContextRef]
    Security.SSLClose.restype = OSStatus
    Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
    Security.SSLGetNumberSupportedCiphers.restype = OSStatus
    Security.SSLGetSupportedCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        POINTER(c_size_t),
    ]
    Security.SSLGetSupportedCiphers.restype = OSStatus
    Security.SSLSetEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        c_size_t,
    ]
    Security.SSLSetEnabledCiphers.restype = OSStatus
    # Fixed: was ``.argtype`` (a typo ctypes silently ignores), so this
    # prototype was never actually applied.
    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
    Security.SSLGetNumberEnabledCiphers.restype = OSStatus
    Security.SSLGetEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        POINTER(c_size_t),
    ]
    Security.SSLGetEnabledCiphers.restype = OSStatus
    Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
    Security.SSLGetNegotiatedCipher.restype = OSStatus
    Security.SSLGetNegotiatedProtocolVersion.argtypes = [
        SSLContextRef,
        POINTER(SSLProtocol),
    ]
    Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
    Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
    Security.SSLCopyPeerTrust.restype = OSStatus
    Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
    Security.SecTrustSetAnchorCertificates.restype = OSStatus
    # Fixed: was ``.argstypes`` (a typo ctypes silently ignores), so this
    # prototype was never actually applied.
    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
    Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
    Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
    Security.SecTrustEvaluate.restype = OSStatus
    Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
    Security.SecTrustGetCertificateCount.restype = CFIndex
    Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
    Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
    Security.SSLCreateContext.argtypes = [
        CFAllocatorRef,
        SSLProtocolSide,
        SSLConnectionType,
    ]
    Security.SSLCreateContext.restype = SSLContextRef
    Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
    Security.SSLSetSessionOption.restype = OSStatus
    Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
    Security.SSLSetProtocolVersionMin.restype = OSStatus
    Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
    Security.SSLSetProtocolVersionMax.restype = OSStatus
    try:
        Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
        Security.SSLSetALPNProtocols.restype = OSStatus
    except AttributeError:
        # Supported only in 10.12+
        pass
    # NOTE(review): this duplicates the SecCopyErrorMessageString prototype
    # declared above; harmless, kept for fidelity.
    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
    Security.SecCopyErrorMessageString.restype = CFStringRef

    # Re-export the types and callbacks on the library objects so callers can
    # reach them via ``Security.<name>``.
    Security.SSLReadFunc = SSLReadFunc
    Security.SSLWriteFunc = SSLWriteFunc
    Security.SSLContextRef = SSLContextRef
    Security.SSLProtocol = SSLProtocol
    Security.SSLCipherSuite = SSLCipherSuite
    Security.SecIdentityRef = SecIdentityRef
    Security.SecKeychainRef = SecKeychainRef
    Security.SecTrustRef = SecTrustRef
    Security.SecTrustResultType = SecTrustResultType
    Security.SecExternalFormat = SecExternalFormat
    Security.OSStatus = OSStatus

    Security.kSecImportExportPassphrase = CFStringRef.in_dll(
        Security, "kSecImportExportPassphrase"
    )
    Security.kSecImportItemIdentity = CFStringRef.in_dll(
        Security, "kSecImportItemIdentity"
    )

    # CoreFoundation time!
    CoreFoundation.CFRetain.argtypes = [CFTypeRef]
    CoreFoundation.CFRetain.restype = CFTypeRef
    CoreFoundation.CFRelease.argtypes = [CFTypeRef]
    CoreFoundation.CFRelease.restype = None
    CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
    CoreFoundation.CFGetTypeID.restype = CFTypeID
    CoreFoundation.CFStringCreateWithCString.argtypes = [
        CFAllocatorRef,
        c_char_p,
        CFStringEncoding,
    ]
    CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
    CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
    CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
    CoreFoundation.CFStringGetCString.argtypes = [
        CFStringRef,
        c_char_p,
        CFIndex,
        CFStringEncoding,
    ]
    CoreFoundation.CFStringGetCString.restype = c_bool
    CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
    CoreFoundation.CFDataCreate.restype = CFDataRef
    CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
    CoreFoundation.CFDataGetLength.restype = CFIndex
    CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
    CoreFoundation.CFDataGetBytePtr.restype = c_void_p
    CoreFoundation.CFDictionaryCreate.argtypes = [
        CFAllocatorRef,
        POINTER(CFTypeRef),
        POINTER(CFTypeRef),
        CFIndex,
        CFDictionaryKeyCallBacks,
        CFDictionaryValueCallBacks,
    ]
    CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
    CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
    CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
    CoreFoundation.CFArrayCreate.argtypes = [
        CFAllocatorRef,
        POINTER(CFTypeRef),
        CFIndex,
        CFArrayCallBacks,
    ]
    CoreFoundation.CFArrayCreate.restype = CFArrayRef
    CoreFoundation.CFArrayCreateMutable.argtypes = [
        CFAllocatorRef,
        CFIndex,
        CFArrayCallBacks,
    ]
    CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
    CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
    CoreFoundation.CFArrayAppendValue.restype = None
    CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
    CoreFoundation.CFArrayGetCount.restype = CFIndex
    CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
    CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p

    CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
        CoreFoundation, "kCFAllocatorDefault"
    )
    CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
        CoreFoundation, "kCFTypeArrayCallBacks"
    )
    CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
        CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
    )
    CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
        CoreFoundation, "kCFTypeDictionaryValueCallBacks"
    )

    CoreFoundation.CFTypeRef = CFTypeRef
    CoreFoundation.CFArrayRef = CFArrayRef
    CoreFoundation.CFStringRef = CFStringRef
    CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
    raise ImportError("Error initializing ctypes") from None
The provided code snippet includes necessary dependencies for implementing the `_load_client_cert_chain` function. Write a Python function `def _load_client_cert_chain(keychain: SecKeychainRef, *paths: Optional[str]) -> CFArray` to solve the following problem:
Load certificates and maybe keys from a number of files. Has the end goal of returning a CFArray containing one SecIdentityRef, and then zero or more SecCertificateRef objects, suitable for use as a client certificate trust chain.
Here is the function:
def _load_client_cert_chain(keychain: SecKeychainRef, *paths: Optional[str]) -> CFArray:
    """
    Load certificates and maybe keys from a number of files. Has the end goal
    of returning a CFArray containing one SecIdentityRef, and then zero or more
    SecCertificateRef objects, suitable for use as a client certificate trust
    chain.

    :param keychain: the keychain private keys are imported into; macOS only
        mints a SecIdentityRef for keys held in a keychain.
    :param paths: cert/key file paths; falsy entries are skipped.
    :returns: a CFArray. The caller is responsible for freeing it, and it must
        remain alive for the entire connection.
    """
    # Ok, the strategy.
    #
    # This relies on knowing that macOS will not give you a SecIdentityRef
    # unless you have imported a key into a keychain. This is a somewhat
    # artificial limitation of macOS (for example, it doesn't necessarily
    # affect iOS), but there is nothing inside Security.framework that lets you
    # get a SecIdentityRef without having a key in a keychain.
    #
    # So the policy here is we take all the files and iterate them in order.
    # Each one will use SecItemImport to have one or more objects loaded from
    # it. We will also point at a keychain that macOS can use to work with the
    # private key.
    #
    # Once we have all the objects, we'll check what we actually have. If we
    # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
    # we'll take the first certificate (which we assume to be our leaf) and
    # ask the keychain to give us a SecIdentityRef with that cert's associated
    # key.
    #
    # We'll then return a CFArray containing the trust chain: one
    # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
    # responsibility for freeing this CFArray will be with the caller. This
    # CFArray must remain alive for the entire connection, so in practice it
    # will be stored with a single SSLSocket, along with the reference to the
    # keychain.
    certificates = []
    identities = []

    # Filter out bad paths.
    filtered_paths = (path for path in paths if path)

    try:
        for file_path in filtered_paths:
            new_identities, new_certs = _load_items_from_file(keychain, file_path)
            identities.extend(new_identities)
            certificates.extend(new_certs)

        # Ok, we have everything. The question is: do we have an identity? If
        # not, we want to grab one from the first cert we have.
        if not identities:
            # NOTE(review): if no file yielded certificates either,
            # certificates[0] raises IndexError here — confirm callers always
            # supply at least one cert-bearing file.
            new_identity = Security.SecIdentityRef()
            status = Security.SecIdentityCreateWithCertificate(
                keychain, certificates[0], ctypes.byref(new_identity)
            )
            _assert_no_error(status)
            identities.append(new_identity)

            # We now want to release the original certificate, as we no longer
            # need it.
            CoreFoundation.CFRelease(certificates.pop(0))

        # We now need to build a new CFArray that holds the trust chain.
        trust_chain = CoreFoundation.CFArrayCreateMutable(
            CoreFoundation.kCFAllocatorDefault,
            0,
            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
        )
        for item in itertools.chain(identities, certificates):
            # ArrayAppendValue does a CFRetain on the item. That's fine,
            # because the finally block will release our other refs to them.
            CoreFoundation.CFArrayAppendValue(trust_chain, item)

        return trust_chain
    finally:
        # Drop our own references; the trust_chain (if built) holds its own.
        for obj in itertools.chain(identities, certificates):
            CoreFoundation.CFRelease(obj)
157,733 | import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from typing import Any, List, Optional, Tuple, Type
from .bindings import ( # type: ignore[attr-defined]
CFArray,
CFConst,
CFData,
CFDictionary,
CFMutableArray,
CFString,
CFTypeRef,
CoreFoundation,
SecKeychainRef,
Security,
)
# Map of protocol-version name -> (major, minor) bytes as written into TLS
# record headers (see _build_tls_unknown_ca_alert).
TLS_PROTOCOL_VERSIONS = {
    "SSLv2": (0, 2),
    "SSLv3": (3, 0),
    "TLSv1": (3, 1),
    "TLSv1.1": (3, 2),
    "TLSv1.2": (3, 3),
}
The provided code snippet includes necessary dependencies for implementing the `_build_tls_unknown_ca_alert` function. Write a Python function `def _build_tls_unknown_ca_alert(version: str) -> bytes` to solve the following problem:
Builds a TLS alert record for an unknown CA.
Here is the function:
def _build_tls_unknown_ca_alert(version: str) -> bytes:
    """
    Build a raw TLS alert record reporting an unknown CA, framed for the
    given protocol *version* (a key of TLS_PROTOCOL_VERSIONS).
    """
    major, minor = TLS_PROTOCOL_VERSIONS[version]

    # Alert payload: severity byte (0x02 = fatal) followed by the alert
    # description byte (0x30 = unknown_ca).
    payload = struct.pack(">BB", 0x02, 0x30)

    # Record header: content type (0x15 = alert), protocol version bytes,
    # then the 2-byte big-endian payload length.
    header = struct.pack(">BBBH", 0x15, major, minor, len(payload))
    return header + payload
157,734 | import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
from socket import socket as socket_cls
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Generator,
List,
Optional,
TextIO,
Union,
cast,
)
from .. import util
from ._securetransport.bindings import ( # type: ignore[attr-defined]
CoreFoundation,
Security,
)
from ._securetransport.low_level import (
SecurityConst,
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
HAS_SNI = True  # this backend always supports SNI (see SSLSetPeerDomainName)
class SecureTransportContext:
    """
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    """

    # NOTE(review): several methods below share a name (check_hostname,
    # options, verify_mode, minimum_version, maximum_version). As written the
    # later def simply shadows the earlier one; presumably these were
    # @property getter/setter pairs whose decorators were stripped — confirm
    # against the upstream source.

    def __init__(self, protocol: int) -> None:
        # TLS version bounds; MINIMUM/MAXIMUM_SUPPORTED means unrestricted.
        self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
        self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED
        if protocol not in (None, ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT):
            # NOTE(review): writes _min_version/_max_version, but the rest of
            # this class reads _minimum_version/_maximum_version — verify
            # intentional.
            self._min_version, self._max_version = _protocol_to_min_max[protocol]

        self._options = 0
        self._verify = False  # True iff verify_mode == CERT_REQUIRED
        self._trust_bundle: Optional[bytes] = None  # CA file path or PEM data
        self._client_cert: Optional[str] = None
        self._client_key: Optional[str] = None
        self._client_key_passphrase = None
        self._alpn_protocols: Optional[List[bytes]] = None

    def check_hostname(self) -> "Literal[True]":
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        return True

    def check_hostname(self, value: Any) -> None:
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        pass

    def options(self) -> int:
        # TODO: Well, crap.
        #
        # So this is the bit of the code that is the most likely to cause us
        # trouble. Essentially we need to enumerate all of the SSL options that
        # users might want to use and try to see if we can sensibly translate
        # them, or whether we should just ignore them.
        return self._options

    def options(self, value: int) -> None:
        # TODO: Update in line with above.
        self._options = value

    def verify_mode(self) -> int:
        # Only CERT_REQUIRED / CERT_NONE are representable here.
        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE

    def verify_mode(self, value: int) -> None:
        self._verify = value == ssl.CERT_REQUIRED

    def set_default_verify_paths(self) -> None:
        # So, this has to do something a bit weird. Specifically, what it does
        # is nothing.
        #
        # This means that, if we had previously had load_verify_locations
        # called, this does not undo that. We need to do that because it turns
        # out that the rest of the urllib3 code will attempt to load the
        # default verify paths if it hasn't been told about any paths, even if
        # the context itself was sometime earlier. We resolve that by just
        # ignoring it.
        pass

    def load_default_certs(self) -> None:
        return self.set_default_verify_paths()

    def set_ciphers(self, ciphers: Any) -> None:
        raise ValueError("SecureTransport doesn't support custom cipher strings")

    def load_verify_locations(
        self,
        cafile: Optional[str] = None,
        capath: Optional[str] = None,
        cadata: Optional[bytes] = None,
    ) -> None:
        # OK, we only really support cadata and cafile.
        if capath is not None:
            raise ValueError("SecureTransport does not support cert directories")

        # Raise if cafile does not exist.
        if cafile is not None:
            with open(cafile):
                pass

        self._trust_bundle = cafile or cadata  # type: ignore[assignment]

    def load_cert_chain(
        self,
        certfile: str,
        keyfile: Optional[str] = None,
        password: Optional[str] = None,
    ) -> None:
        self._client_cert = certfile
        self._client_key = keyfile
        # NOTE(review): stored as _client_cert_passphrase, but wrap_socket()
        # hands _client_key_passphrase (set in __init__) to handshake(), so
        # this value appears unused — confirm upstream.
        self._client_cert_passphrase = password

    def set_alpn_protocols(self, protocols: List[Union[str, bytes]]) -> None:
        """
        Sets the ALPN protocols that will later be set on the context.

        Raises a NotImplementedError if ALPN is not supported.
        """
        if not hasattr(Security, "SSLSetALPNProtocols"):
            raise NotImplementedError(
                "SecureTransport supports ALPN only in macOS 10.12+"
            )
        self._alpn_protocols = [util.util.to_bytes(p, "ascii") for p in protocols]

    def wrap_socket(
        self,
        sock: socket_cls,
        server_side: bool = False,
        do_handshake_on_connect: bool = True,
        suppress_ragged_eofs: bool = True,
        server_hostname: Optional[Union[bytes, str]] = None,
    ) -> WrappedSocket:
        # So, what do we do here? Firstly, we assert some properties. This is a
        # stripped down shim, so there is some functionality we don't support.
        # See PEP 543 for the real deal.
        assert not server_side
        assert do_handshake_on_connect
        assert suppress_ragged_eofs

        # Ok, we're good to go. Now we want to create the wrapped socket object
        # and store it in the appropriate place.
        wrapped_socket = WrappedSocket(sock)

        # Now we can handshake
        wrapped_socket.handshake(
            server_hostname,
            self._verify,
            self._trust_bundle,
            _tls_version_to_st[self._minimum_version],
            _tls_version_to_st[self._maximum_version],
            self._client_cert,
            self._client_key,
            self._client_key_passphrase,
            self._alpn_protocols,
        )
        return wrapped_socket

    def minimum_version(self) -> int:
        return self._minimum_version

    def minimum_version(self, minimum_version: int) -> None:
        self._minimum_version = minimum_version

    def maximum_version(self) -> int:
        return self._maximum_version

    def maximum_version(self, maximum_version: int) -> None:
        self._maximum_version = maximum_version
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
The provided code snippet includes necessary dependencies for implementing the `inject_into_urllib3` function. Write a Python function `def inject_into_urllib3() -> None` to solve the following problem:
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
Here is the function:
def inject_into_urllib3() -> None:
    """
    Monkey-patch urllib3 with SecureTransport-backed SSL-support.
    """
    util.SSLContext = SecureTransportContext  # type: ignore[assignment]
    util.ssl_.SSLContext = SecureTransportContext  # type: ignore[assignment]
    # The flags live on both util and util.ssl_; patch them in lockstep.
    for module in (util, util.ssl_):
        module.HAS_SNI = HAS_SNI
        module.IS_SECURETRANSPORT = True
    util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = True
157,735 | import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
from socket import socket as socket_cls
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Generator,
List,
Optional,
TextIO,
Union,
cast,
)
from .. import util
from ._securetransport.bindings import ( # type: ignore[attr-defined]
CoreFoundation,
Security,
)
from ._securetransport.low_level import (
SecurityConst,
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
HAS_SNI = True  # this backend always supports SNI (see SSLSetPeerDomainName)
# Originals of everything inject_into_urllib3() replaces, captured at import
# time so extract_from_urllib3() can restore them.
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
orig_util_USE_SYSTEM_SSL_CIPHERS = util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
The provided code snippet includes necessary dependencies for implementing the `extract_from_urllib3` function. Write a Python function `def extract_from_urllib3() -> None` to solve the following problem:
Undo monkey-patching by :func:`inject_into_urllib3`.
Here is the function:
def extract_from_urllib3() -> None:
    """
    Undo monkey-patching by :func:`inject_into_urllib3`.
    """
    util.SSLContext = orig_util_SSLContext
    util.ssl_.SSLContext = orig_util_SSLContext
    # Restore the flags on both util and util.ssl_ in lockstep.
    for module in (util, util.ssl_):
        module.HAS_SNI = orig_util_HAS_SNI
        module.IS_SECURETRANSPORT = False
    util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = orig_util_USE_SYSTEM_SSL_CIPHERS
157,736 | import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
from socket import socket as socket_cls
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Generator,
List,
Optional,
TextIO,
Union,
cast,
)
from .. import util
from ._securetransport.bindings import ( # type: ignore[attr-defined]
CoreFoundation,
Security,
)
from ._securetransport.low_level import (
SecurityConst,
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
# Registry mapping SecureTransport connection IDs -> WrappedSocket, consulted
# by the read/write callbacks. Weak values, so a collected socket drops out
# automatically.
_connection_refs: "weakref.WeakValueDictionary[int, 'WrappedSocket']" = (
    weakref.WeakValueDictionary()
)
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
class SecurityConst:
    """
    A class object that acts as essentially a namespace for Security constants.
    """

    kSSLSessionOptionBreakOnServerAuth = 0

    # SSLProtocol version constants.
    kSSLProtocol2 = 1
    kSSLProtocol3 = 2
    kTLSProtocol1 = 4
    kTLSProtocol11 = 7
    kTLSProtocol12 = 8
    # SecureTransport does not support TLS 1.3 even if there's a constant for it
    kTLSProtocol13 = 10
    kTLSProtocolMaxSupported = 999

    kSSLClientSide = 1
    kSSLStreamType = 0

    kSecFormatPEMSequence = 10

    # SecTrustResultType values.
    kSecTrustResultInvalid = 0
    kSecTrustResultProceed = 1
    # This gap is present on purpose: this was kSecTrustResultConfirm, which
    # is deprecated.
    kSecTrustResultDeny = 3
    kSecTrustResultUnspecified = 4
    kSecTrustResultRecoverableTrustFailure = 5
    kSecTrustResultFatalTrustFailure = 6
    kSecTrustResultOtherError = 7

    # errSSL* OSStatus codes returned by the SSL* APIs.
    errSSLProtocol = -9800
    errSSLWouldBlock = -9803
    errSSLClosedGraceful = -9805
    errSSLClosedNoNotify = -9816
    errSSLClosedAbort = -9806

    errSSLXCertChainInvalid = -9807
    errSSLCrypto = -9809
    errSSLInternal = -9810
    errSSLCertExpired = -9814
    errSSLCertNotYetValid = -9815
    errSSLUnknownRootCert = -9812
    errSSLNoRootCert = -9813
    errSSLHostNameMismatch = -9843
    errSSLPeerHandshakeFail = -9824
    errSSLPeerUserCancelled = -9839
    errSSLWeakPeerEphemeralDHKey = -9850
    errSSLServerAuthCompleted = -9841
    errSSLRecordOverflow = -9847

    # errSec* codes from the keychain / trust APIs.
    errSecVerifyFailed = -67808
    errSecNoTrustSettings = -25263
    errSecItemNotFound = -25300
    errSecInvalidTrustSettings = -25262
The provided code snippet includes necessary dependencies for implementing the `_read_callback` function. Write a Python function `def _read_callback( connection_id: int, data_buffer: int, data_length_pointer: bytearray ) -> int` to solve the following problem:
SecureTransport read callback. This is called by ST to request that data be returned from the socket.
Here is the function:
def _read_callback(
    connection_id: int, data_buffer: int, data_length_pointer: bytearray
) -> int:
    """
    SecureTransport read callback. This is called by ST to request that data
    be returned from the socket.

    Reads up to data_length_pointer[0] bytes from the wrapped socket directly
    into data_buffer, records the count actually read back into
    data_length_pointer[0], and returns an OSStatus code (0 on success).
    """
    wrapped_socket = None
    try:
        # Resolve the socket from its connection ID (weak dict — may be gone).
        wrapped_socket = _connection_refs.get(connection_id)
        if wrapped_socket is None:
            return SecurityConst.errSSLInternal
        base_socket = wrapped_socket.socket

        requested_length = data_length_pointer[0]

        timeout = wrapped_socket.gettimeout()
        error = None
        read_count = 0

        try:
            while read_count < requested_length:
                if timeout is None or timeout >= 0:
                    if not util.wait_for_read(base_socket, timeout):
                        raise OSError(errno.EAGAIN, "timed out")

                # recv_into a ctypes view of the remaining slice of the
                # caller's buffer: no intermediate Python copy.
                remaining = requested_length - read_count
                buffer = (ctypes.c_char * remaining).from_address(
                    data_buffer + read_count
                )
                chunk_size = base_socket.recv_into(buffer, remaining)  # type: ignore[arg-type]
                read_count += chunk_size
                if not chunk_size:
                    # Peer closed the connection.
                    if not read_count:
                        return SecurityConst.errSSLClosedGraceful
                    break
        except OSError as e:
            error = e.errno

            if error is not None and error != errno.EAGAIN:
                data_length_pointer[0] = read_count
                if error == errno.ECONNRESET or error == errno.EPIPE:
                    return SecurityConst.errSSLClosedAbort
                raise

        data_length_pointer[0] = read_count

        if read_count != requested_length:
            return SecurityConst.errSSLWouldBlock

        return 0
    except Exception as e:
        # Exceptions must not cross the ctypes callback boundary: stash them
        # on the socket for later re-raising and signal an internal error.
        if wrapped_socket is not None:
            wrapped_socket._exception = e
        return SecurityConst.errSSLInternal
157,737 | import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
from socket import socket as socket_cls
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Generator,
List,
Optional,
TextIO,
Union,
cast,
)
from .. import util
from ._securetransport.bindings import ( # type: ignore[attr-defined]
CoreFoundation,
Security,
)
from ._securetransport.low_level import (
SecurityConst,
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
# Registry mapping SecureTransport connection IDs -> WrappedSocket, consulted
# by the read/write callbacks. Weak values, so a collected socket drops out
# automatically.
_connection_refs: "weakref.WeakValueDictionary[int, 'WrappedSocket']" = (
    weakref.WeakValueDictionary()
)
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
class SecurityConst:
    """
    A class object that acts as essentially a namespace for Security constants.
    """

    kSSLSessionOptionBreakOnServerAuth = 0

    # SSLProtocol version constants.
    kSSLProtocol2 = 1
    kSSLProtocol3 = 2
    kTLSProtocol1 = 4
    kTLSProtocol11 = 7
    kTLSProtocol12 = 8
    # SecureTransport does not support TLS 1.3 even if there's a constant for it
    kTLSProtocol13 = 10
    kTLSProtocolMaxSupported = 999

    kSSLClientSide = 1
    kSSLStreamType = 0

    kSecFormatPEMSequence = 10

    # SecTrustResultType values.
    kSecTrustResultInvalid = 0
    kSecTrustResultProceed = 1
    # This gap is present on purpose: this was kSecTrustResultConfirm, which
    # is deprecated.
    kSecTrustResultDeny = 3
    kSecTrustResultUnspecified = 4
    kSecTrustResultRecoverableTrustFailure = 5
    kSecTrustResultFatalTrustFailure = 6
    kSecTrustResultOtherError = 7

    # errSSL* OSStatus codes returned by the SSL* APIs.
    errSSLProtocol = -9800
    errSSLWouldBlock = -9803
    errSSLClosedGraceful = -9805
    errSSLClosedNoNotify = -9816
    errSSLClosedAbort = -9806

    errSSLXCertChainInvalid = -9807
    errSSLCrypto = -9809
    errSSLInternal = -9810
    errSSLCertExpired = -9814
    errSSLCertNotYetValid = -9815
    errSSLUnknownRootCert = -9812
    errSSLNoRootCert = -9813
    errSSLHostNameMismatch = -9843
    errSSLPeerHandshakeFail = -9824
    errSSLPeerUserCancelled = -9839
    errSSLWeakPeerEphemeralDHKey = -9850
    errSSLServerAuthCompleted = -9841
    errSSLRecordOverflow = -9847

    # errSec* codes from the keychain / trust APIs.
    errSecVerifyFailed = -67808
    errSecNoTrustSettings = -25263
    errSecItemNotFound = -25300
    errSecInvalidTrustSettings = -25262
The provided code snippet includes necessary dependencies for implementing the `_write_callback` function. Write a Python function `def _write_callback( connection_id: int, data_buffer: int, data_length_pointer: bytearray ) -> int` to solve the following problem:
SecureTransport write callback. This is called by ST to request that data actually be sent on the network.
Here is the function:
def _write_callback(
    connection_id: int, data_buffer: int, data_length_pointer: bytearray
) -> int:
    """
    SecureTransport write callback. This is called by ST to request that data
    actually be sent on the network.

    :param connection_id: Key into ``_connection_refs`` identifying the
        ``WrappedSocket`` this callback services.
    :param data_buffer: Address of the C buffer holding the bytes to send.
    :param data_length_pointer: In/out length: on entry the number of bytes to
        write, on exit the number of bytes actually written.
    :returns: ``0`` on success, otherwise a ``SecurityConst.errSSL*`` code.
    """
    wrapped_socket = None
    try:
        # Resolve the opaque connection id back to its Python-side socket.
        wrapped_socket = _connection_refs.get(connection_id)
        if wrapped_socket is None:
            return SecurityConst.errSSLInternal
        base_socket = wrapped_socket.socket
        bytes_to_write = data_length_pointer[0]
        data = ctypes.string_at(data_buffer, bytes_to_write)
        timeout = wrapped_socket.gettimeout()
        error = None
        sent = 0
        try:
            while sent < bytes_to_write:
                if timeout is None or timeout >= 0:
                    # Wait until the socket is writable, or time out.
                    if not util.wait_for_write(base_socket, timeout):
                        raise OSError(errno.EAGAIN, "timed out")
                chunk_sent = base_socket.send(data)
                sent += chunk_sent
                # This has some needless copying here, but I'm not sure there's
                # much value in optimising this data path.
                data = data[chunk_sent:]
        except OSError as e:
            error = e.errno
            if error is not None and error != errno.EAGAIN:
                # Report the partial byte count before surfacing the failure.
                data_length_pointer[0] = sent
                if error == errno.ECONNRESET or error == errno.EPIPE:
                    return SecurityConst.errSSLClosedAbort
                raise
        data_length_pointer[0] = sent
        if sent != bytes_to_write:
            # Partial write: SecureTransport will call back for the rest.
            return SecurityConst.errSSLWouldBlock
        return 0
    except Exception as e:
        # Exceptions must not cross the ctypes boundary; stash the exception
        # on the wrapped socket for later inspection instead.
        if wrapped_socket is not None:
            wrapped_socket._exception = e
        return SecurityConst.errSSLInternal
import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
from socket import socket as socket_cls
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Generator,
List,
Optional,
TextIO,
Union,
cast,
)
from .. import util
from ._securetransport.bindings import ( # type: ignore[attr-defined]
CoreFoundation,
Security,
)
from ._securetransport.low_level import (
SecurityConst,
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
def makefile(
    self: socket_cls,
    mode: Union[
        "Literal['r']", "Literal['w']", "Literal['rw']", "Literal['wr']", "Literal['']"
    ] = "r",
    buffering: Optional[int] = None,
    *args: Any,
    **kwargs: Any,
) -> Union[BinaryIO, TextIO]:
    """
    Replacement for ``socket.makefile`` used while SecureTransport is injected.

    SecureTransport buffers internally, which conflicts with the standard
    file-object buffering (see issue #1153), so whatever ``buffering`` the
    caller asked for is ignored and the stream is forced to be unbuffered.
    """
    return socket_cls.makefile(self, mode, 0, *args, **kwargs)
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
import logging
import ssl
from io import BytesIO
from socket import socket as socket_cls
from socket import timeout
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from .. import util
# SNI is always available through pyOpenSSL.
HAS_SNI = True
# Passing through the default SSLContext ciphers is only enabled when the
# linked OpenSSL reports itself as >= 1.1.1.
USE_DEFAULT_SSLCONTEXT_CIPHERS = util.ssl_._is_ge_openssl_v1_1_1(
    openssl_backend.openssl_version_text(), openssl_backend.openssl_version_number()  # type: ignore[no-untyped-call]
)
def _validate_dependencies_met() -> None:
    """
    Verifies that PyOpenSSL's package-level dependencies have been met.
    Throws `ImportError` if they are not met.
    """
    # `Extensions.get_extension_for_class` first appeared in cryptography 1.1;
    # its absence means the installed release is too old for us.
    from cryptography.x509.extensions import Extensions

    if getattr(Extensions, "get_extension_for_class", None) is None:
        raise ImportError(
            "'cryptography' module missing required functionality. "
            "Try upgrading to v1.3.4 or newer."
        )

    # From pyOpenSSL 0.14 onward the OpenSSL bindings come from cryptography,
    # which is observable via the private `_x509` attribute on X509 objects.
    from OpenSSL.crypto import X509

    if getattr(X509(), "_x509", None) is None:
        raise ImportError(
            "'pyOpenSSL' module missing required functionality. "
            "Try upgrading to v0.14 or newer."
        )
class PyOpenSSLContext:
    """
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.

    NOTE(review): the ``@property``/setter decorators below were missing in the
    previous copy of this class, which made ``options``, ``verify_mode``,
    ``minimum_version`` and ``maximum_version`` resolve to the last-defined
    plain function instead of behaving like ``ssl.SSLContext`` attributes.
    They are restored here so attribute-style access works again.
    """

    def __init__(self, protocol: int) -> None:
        self.protocol = _openssl_versions[protocol]
        self._ctx = OpenSSL.SSL.Context(self.protocol)
        self._options = 0
        self.check_hostname = False
        self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
        self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED

    @property
    def options(self) -> int:
        """OpenSSL option bits, mirroring ``ssl.SSLContext.options``."""
        return self._options

    @options.setter
    def options(self, value: int) -> None:
        self._options = value
        # Re-apply options together with the min/max TLS version bounds.
        self._set_ctx_options()

    @property
    def verify_mode(self) -> int:
        """Standard-library verify mode translated from the OpenSSL context."""
        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]

    @verify_mode.setter
    def verify_mode(self, value: int) -> None:
        self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)

    def set_default_verify_paths(self) -> None:
        self._ctx.set_default_verify_paths()

    def set_ciphers(self, ciphers: Union[bytes, str]) -> None:
        if isinstance(ciphers, str):
            ciphers = ciphers.encode("utf-8")
        self._ctx.set_cipher_list(ciphers)

    def load_verify_locations(
        self,
        cafile: Optional[str] = None,
        capath: Optional[str] = None,
        cadata: Optional[bytes] = None,
    ) -> None:
        """Load CA certificates from a file, directory and/or in-memory data."""
        if cafile is not None:
            cafile = cafile.encode("utf-8")  # type: ignore[assignment]
        if capath is not None:
            capath = capath.encode("utf-8")  # type: ignore[assignment]
        try:
            self._ctx.load_verify_locations(cafile, capath)
            if cadata is not None:
                self._ctx.load_verify_locations(BytesIO(cadata))
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"unable to load trusted certificates: {e!r}") from e

    def load_cert_chain(
        self,
        certfile: str,
        keyfile: Optional[str] = None,
        password: Optional[str] = None,
    ) -> None:
        """Load a client certificate chain and (optionally encrypted) key."""
        self._ctx.use_certificate_chain_file(certfile)
        if password is not None:
            if not isinstance(password, bytes):
                password = password.encode("utf-8")  # type: ignore[assignment]
            self._ctx.set_passwd_cb(lambda *_: password)
        self._ctx.use_privatekey_file(keyfile or certfile)

    def set_alpn_protocols(self, protocols: List[Union[bytes, str]]) -> None:
        protocols = [util.util.to_bytes(p, "ascii") for p in protocols]
        return self._ctx.set_alpn_protos(protocols)  # type: ignore[no-any-return]

    def wrap_socket(
        self,
        sock: socket_cls,
        server_side: bool = False,
        do_handshake_on_connect: bool = True,
        suppress_ragged_eofs: bool = True,
        server_hostname: Optional[Union[bytes, str]] = None,
    ) -> WrappedSocket:
        """Perform the client handshake and return a wrapped socket."""
        cnx = OpenSSL.SSL.Connection(self._ctx, sock)
        # If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3
        if server_hostname and not util.ssl_.is_ipaddress(server_hostname):
            if isinstance(server_hostname, str):
                server_hostname = server_hostname.encode("utf-8")
            cnx.set_tlsext_host_name(server_hostname)
        cnx.set_connect_state()
        while True:
            try:
                cnx.do_handshake()
            except OpenSSL.SSL.WantReadError as e:
                # Non-blocking socket not yet readable: wait and retry.
                if not util.wait_for_read(sock, sock.gettimeout()):
                    raise timeout("select timed out") from e  # type: ignore[arg-type]
                continue
            except OpenSSL.SSL.Error as e:
                raise ssl.SSLError(f"bad handshake: {e!r}") from e
            break
        return WrappedSocket(cnx, sock)

    def _set_ctx_options(self) -> None:
        # Combine user options with the option bits implied by the configured
        # minimum/maximum TLS versions.
        self._ctx.set_options(
            self._options
            | _openssl_to_ssl_minimum_version[self._minimum_version]
            | _openssl_to_ssl_maximum_version[self._maximum_version]
        )

    @property
    def minimum_version(self) -> int:
        return self._minimum_version

    @minimum_version.setter
    def minimum_version(self, minimum_version: int) -> None:
        self._minimum_version = minimum_version
        self._set_ctx_options()

    @property
    def maximum_version(self) -> int:
        return self._maximum_version

    @maximum_version.setter
    def maximum_version(self, maximum_version: int) -> None:
        self._maximum_version = maximum_version
        self._set_ctx_options()
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
The provided code snippet includes the necessary dependencies for implementing the `inject_into_urllib3` function. Write a Python function `def inject_into_urllib3() -> None` that solves the following problem:
Monkey-patch urllib3 with PyOpenSSL-backed SSL support.
Here is the function:
def inject_into_urllib3() -> None:
    "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
    _validate_dependencies_met()

    # Swap the SSL machinery in both `util` and `util.ssl_`, then flip the
    # feature flags so the rest of urllib3 knows pyOpenSSL is in charge.
    for module in (util, util.ssl_):
        module.SSLContext = PyOpenSSLContext  # type: ignore[assignment]
        module.HAS_SNI = HAS_SNI
        module.IS_PYOPENSSL = True
    util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = USE_DEFAULT_SSLCONTEXT_CIPHERS
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
import logging
import ssl
from io import BytesIO
from socket import socket as socket_cls
from socket import timeout
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from .. import util
HAS_SNI = True
USE_DEFAULT_SSLCONTEXT_CIPHERS = util.ssl_._is_ge_openssl_v1_1_1(
openssl_backend.openssl_version_text(), openssl_backend.openssl_version_number() # type: ignore[no-untyped-call]
)
# Stash the stock urllib3 SSL machinery so that `extract_from_urllib3` can
# restore it after `inject_into_urllib3` has monkey-patched pyOpenSSL in.
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
orig_util_USE_SYSTEM_SSL_CIPHERS = util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
The provided code snippet includes the necessary dependencies for implementing the `extract_from_urllib3` function. Write a Python function `def extract_from_urllib3() -> None` that solves the following problem:
Undo the monkey-patching applied by :func:`inject_into_urllib3`.
Here is the function:
def extract_from_urllib3() -> None:
    "Undo monkey-patching by :func:`inject_into_urllib3`."
    # Restore the saved stock implementations and clear the pyOpenSSL flag
    # on both `util` and `util.ssl_`.
    for module in (util, util.ssl_):
        module.SSLContext = orig_util_SSLContext
        module.HAS_SNI = orig_util_HAS_SNI
        module.IS_PYOPENSSL = False
    util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = orig_util_USE_SYSTEM_SSL_CIPHERS
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
# Older cryptography releases could raise ``UnsupportedExtension`` while
# parsing certificate extensions; newer releases removed the class, so fall
# back to a local stand-in that can still appear in ``except`` clauses.
try:
    from cryptography.x509 import UnsupportedExtension  # type: ignore[attr-defined]
except ImportError:
    # UnsupportedExtension is gone in cryptography >= 2.1.0
    class UnsupportedExtension(Exception):  # type: ignore[no-redef]
        pass
import logging
import ssl
from io import BytesIO
from socket import socket as socket_cls
from socket import timeout
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from .. import util
# Register the optional protocol constants only when both the stdlib `ssl`
# module and the installed pyOpenSSL build expose them.
if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
    _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
# Module-level logger for this contrib module.
log = logging.getLogger(__name__)
def _dnsname_to_stdlib(name: str) -> Optional[str]:
"""
Converts a dNSName SubjectAlternativeName field to the form used by the
standard library on the given Python version.
Cryptography produces a dNSName as a unicode string that was idna-decoded
from ASCII bytes. We need to idna-encode that string to get it back, and
then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
If the name cannot be idna-encoded then we return None signalling that
the name given should be skipped.
"""
def idna_encode(name: str) -> Optional[bytes]:
"""
Borrowed wholesale from the Python Cryptography Project. It turns out
that we can't just safely call `idna.encode`: it can explode for
wildcard names. This avoids that problem.
"""
import idna
try:
for prefix in ["*.", "."]:
if name.startswith(prefix):
name = name[len(prefix) :]
return prefix.encode("ascii") + idna.encode(name)
return idna.encode(name)
except idna.core.IDNAError:
return None
# Don't send IPv6 addresses through the IDNA encoder.
if ":" in name:
return name
encoded_name = idna_encode(name)
if encoded_name is None:
return None
return encoded_name.decode("utf-8")
The provided code snippet includes the necessary dependencies for implementing the `get_subj_alt_name` function. Write a Python function `def get_subj_alt_name(peer_cert: "CRL") -> List[Tuple[str, str]]` that solves the following problem:
Given a PyOpenSSL certificate, return all of its subject alternative names.
Here is the function:
def get_subj_alt_name(peer_cert: "CRL") -> List[Tuple[str, str]]:
    """
    Given a PyOpenSSL certificate, provides all the subject alternative names.
    """
    # Hand the certificate over to cryptography, whose X.509 APIs are much
    # better suited for this than pyOpenSSL's.
    if hasattr(peer_cert, "to_cryptography"):
        cert = peer_cert.to_cryptography()
    else:
        # Fallback for pyOpenSSL releases that predate `to_cryptography`;
        # relies on the private `_x509` handle.
        cert = _Certificate(openssl_backend, peer_cert._x509)

    # Locate the SAN extension via cryptography (faster than looping here).
    try:
        san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
    except x509.ExtensionNotFound:
        # No SAN extension at all.
        return []
    except (
        x509.DuplicateExtension,
        UnsupportedExtension,
        x509.UnsupportedGeneralNameType,
        UnicodeError,
    ) as e:
        # The certificate is malformed in some way; behave as if no SAN
        # field were present.
        log.warning(
            "A problem was encountered with the certificate that prevented "
            "urllib3 from finding the SubjectAlternativeName field. This can "
            "affect certificate validation. The error was %s",
            e,
        )
        return []

    # dNSName entries are idna-encoded (then UTF-8 decoded) to match what the
    # standard library does with certificates; names that cannot be idna
    # encoded come back as None and are skipped. iPAddress entries are
    # stringified because match_hostname wants strings.
    dns_names = (
        ("DNS", dns)
        for dns in map(_dnsname_to_stdlib, san.get_values_for_type(x509.DNSName))
        if dns is not None
    )
    ip_names = (
        ("IP Address", str(ip)) for ip in san.get_values_for_type(x509.IPAddress)
    )
    return [*dns_names, *ip_names]
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
import logging
import ssl
from io import BytesIO
from socket import socket as socket_cls
from socket import timeout
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from .. import util
def _verify_callback(
    cnx: OpenSSL.SSL.Connection,
    x509: "X509",
    err_no: int,
    err_depth: int,
    return_code: int,
) -> bool:
    # pyOpenSSL verification hook: a certificate is acceptable exactly when
    # OpenSSL reported error code 0 for it; depth and return code are unused.
    is_valid = err_no == 0
    return is_valid
import errno
import logging
import queue
import sys
import warnings
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
from types import TracebackType
from typing import TYPE_CHECKING, Any, Mapping, Optional, Type, TypeVar, Union, overload
from ._request_methods import RequestMethods
from .connection import (
_TYPE_BODY,
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
ProxyConfig,
VerifiedHTTPSConnection,
_wrap_proxy_error,
)
from .connection import port_by_scheme as port_by_scheme
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
FullPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
)
from .response import BaseHTTPResponse, HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
from .util.util import to_str
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`http.client.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`http.client.HTTPConnection`.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
fine-grained control over request timeouts. After the constructor has
been parsed, this is always a `urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to False, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one does not want to use more
than maxsize connections per host to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
:class:`urllib3.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
instead, see :class:`urllib3.ProxyManager`
:param \\**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = "http"
ConnectionCls: Type[Union[HTTPConnection, HTTPSConnection]] = HTTPConnection
ResponseCls = HTTPResponse
    def __init__(
        self,
        host: str,
        port: Optional[int] = None,
        timeout: Optional[_TYPE_TIMEOUT] = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: Optional[Mapping[str, str]] = None,
        retries: Optional[Union[Retry, bool, int]] = None,
        _proxy: Optional[Url] = None,
        _proxy_headers: Optional[Mapping[str, str]] = None,
        _proxy_config: Optional[ProxyConfig] = None,
        **conn_kw: Any,
    ):
        """Initialize the pool; parameters are documented on the class."""
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)
        # Normalise a bare number into a Timeout so the rest of the pool can
        # rely on `.connect_timeout` / `.read_timeout`.
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)
        if retries is None:
            retries = Retry.DEFAULT
        self.timeout = timeout
        self.retries = retries
        self.pool: Optional[queue.LifoQueue[Any]] = self.QueueCls(maxsize)
        self.block = block
        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config
        # Fill the queue up so that doing get() on it will block properly
        for _ in range(maxsize):
            self.pool.put(None)
        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw
        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault("socket_options", [])
            self.conn_kw["proxy"] = self.proxy
            self.conn_kw["proxy_config"] = self.proxy_config
def _new_conn(self) -> HTTPConnection:
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTP connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "80",
)
conn = self.ConnectionCls(
host=self.host,
port=self.port,
timeout=self.timeout.connect_timeout,
**self.conn_kw,
)
return conn
    def _get_conn(self, timeout: Optional[float] = None) -> HTTPConnection:
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        # `conn` stays None when the queue is empty and block is False; a
        # fresh connection is created at the end in that case.
        conn = None
        if self.pool is None:
            raise ClosedPoolError(self, "Pool is closed.")
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)
        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.") from None  # Defensive:
        except queue.Empty:
            if self.block:
                raise EmptyPoolError(
                    self,
                    "Pool is empty and a new connection can't be opened due to blocking mode.",
                ) from None
            pass  # Oh well, we'll create a new connection then
        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.debug("Resetting dropped connection: %s", self.host)
            conn.close()
            if getattr(conn, "auto_open", 1) == 0:
                # This is a proxied connection that has been mutated by
                # http.client._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None
        return conn or self._new_conn()
    def _put_conn(self, conn: Optional[HTTPConnection]) -> None:
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        if self.pool is not None:
            try:
                self.pool.put(conn, block=False)
                return  # Everything is dandy, done.
            except AttributeError:
                # self.pool is None.
                pass
            except queue.Full:
                # Connection never got put back into the pool, close it.
                if conn:
                    conn.close()
                if self.block:
                    # This should never happen if you got the conn from self._get_conn
                    raise FullPoolError(
                        self,
                        "Pool reached maximum size and no more connections are allowed.",
                    ) from None
                log.warning(
                    "Connection pool is full, discarding connection: %s", self.host
                )
        # Reached when the pool is closed (or became None mid-put):
        # connection never got put back into the pool, close it.
        if conn:
            conn.close()
def _validate_conn(self, conn: HTTPConnection) -> None:
"""
Called right before a request is made, after the socket is created.
"""
pass
def _prepare_proxy(self, conn: HTTPConnection) -> None:
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout:
"""Helper that always returns a :class:`urllib3.util.Timeout`"""
if timeout is _DEFAULT_TIMEOUT:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout)
def _raise_timeout(
self,
err: Union[BaseSSLError, OSError, SocketTimeout],
url: str,
timeout_value: Optional[_TYPE_TIMEOUT],
) -> None:
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(
self, url, f"Read timed out. (read timeout={timeout_value})"
) from err
# See the above comment about EAGAIN in Python 3.
if hasattr(err, "errno") and err.errno in _blocking_errnos:
raise ReadTimeoutError(
self, url, f"Read timed out. (read timeout={timeout_value})"
) from err
    def _make_request(
        self,
        conn: HTTPConnection,
        method: str,
        url: str,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        chunked: bool = False,
        **httplib_request_kw: Any,
    ) -> _HttplibHTTPResponse:
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools
        :param method:
            HTTP request method (e.g. ``"GET"``).
        :param url:
            Target URL/path for the request.
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        :param chunked:
            If ``True``, send the body with chunked transfer encoding.
        """
        self.num_requests += 1
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]
        try:
            # Trigger any extra validation we need to do.
            try:
                self._validate_conn(conn)
            except (SocketTimeout, BaseSSLError) as e:
                self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
                raise
        # _validate_conn() starts the connection to an HTTPS proxy
        # so we need to wrap errors with 'ProxyError' here too.
        except (
            OSError,
            NewConnectionError,
            TimeoutError,
            BaseSSLError,
            CertificateError,
            SSLError,
        ) as e:
            # Normalise TLS-layer failures to urllib3's SSLError before the
            # proxy-wrapping check below.
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            if isinstance(
                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
            ) and (conn and conn._connecting_to_proxy):
                new_e = _wrap_proxy_error(new_e)
            raise new_e
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            pass
        except OSError as e:
            # MacOS/Linux
            # EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno != errno.EPROTOTYPE:
                raise
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
        if conn.sock:
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, f"Read timed out. (read timeout={read_timeout})"
                )
            conn.sock.settimeout(read_timeout)
        # Receive the response from the server
        try:
            httplib_response = conn.getresponse()
        except (BaseSSLError, OSError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise
        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            # HTTP version
            conn._http_vsn_str,  # type: ignore[attr-defined]
            httplib_response.status,
            httplib_response.length,  # type: ignore[attr-defined]
        )
        # Malformed headers are logged but do not abort the response.
        try:
            assert_header_parsing(httplib_response.msg)
        except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
            log.warning(
                "Failed to parse headers (url=%s): %s",
                self._absolute_url(url),
                hpe,
                exc_info=True,
            )
        return httplib_response
def _absolute_url(self, path: str) -> str:
return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
def close(self) -> None:
"""
Close all pooled connections and disable the pool.
"""
if self.pool is None:
return
# Disable access to the pool
old_pool, self.pool = self.pool, None
try:
while True:
conn = old_pool.get(block=False)
if conn:
conn.close()
except queue.Empty:
pass # Done.
def is_same_host(self, url: str) -> bool:
"""
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
if url.startswith("/"):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, _, host, port, *_ = parse_url(url)
scheme = scheme or "http"
if host is not None:
host = _normalize_host(host, scheme=scheme)
# Use explicit default port for comparison when none is given
if self.port and not port:
port = port_by_scheme.get(scheme)
elif not self.port and port == port_by_scheme.get(scheme):
port = None
return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen( # type: ignore[override]
self,
method: str,
url: str,
body: Optional[_TYPE_BODY] = None,
headers: Optional[Mapping[str, str]] = None,
retries: Optional[Union[Retry, bool, int]] = None,
redirect: bool = True,
assert_same_host: bool = True,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: Optional[int] = None,
release_conn: Optional[bool] = None,
chunked: bool = False,
body_pos: Optional[Union[int, object]] = None,
**response_kw: Any,
) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = to_str(_encode_target(url))
else:
url = to_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy() # type: ignore[attr-defined]
headers.update(self.proxy_headers) # type: ignore[union-attr]
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn:
assert isinstance(self.proxy, Url)
conn._connecting_to_proxy = True
if http_tunnel_required:
try:
self._prepare_proxy(conn)
except (BaseSSLError, OSError, SocketTimeout) as e:
self._raise_timeout(
err=e, url=self.proxy.url, timeout_value=conn.timeout
)
raise
# Make the request on the httplib connection object.
httplib_response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
)
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Pass method to Response for length checking
response_kw["request_method"] = method
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(
httplib_response,
pool=self,
connection=response_conn,
retries=retries,
**response_kw,
)
# Everything went great!
clean_exit = True
except EmptyPoolError:
# Didn't get a connection from the pool, no need to clean up
clean_exit = True
release_this_conn = False
raise
except (
TimeoutError,
HTTPException,
OSError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
ProxyError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
new_e: Exception = e
if isinstance(e, (BaseSSLError, CertificateError)):
new_e = SSLError(e)
if (
isinstance(
new_e,
(
OSError,
NewConnectionError,
TimeoutError,
SSLError,
HTTPException,
),
)
and (conn and conn._connecting_to_proxy)
):
new_e = _wrap_proxy_error(new_e)
elif isinstance(new_e, (OSError, HTTPException)):
new_e = ProtocolError("Connection aborted.", new_e)
retries = retries.increment(
method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
)
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if not clean_exit:
# We hit some kind of exception, handled or otherwise. We need
# to throw the connection away unless explicitly told not to.
# Close the connection, set the variable to None, and make sure
# we put the None back in the pool to avoid leaking it.
if conn:
conn.close()
conn = None
release_this_conn = True
if release_this_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning(
"Retrying (%r) after connection broken by '%r': %s", retries, err, url
)
return self.urlopen(
method,
url,
body,
headers,
retries,
redirect,
assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw,
)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = "GET"
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep_for_retry(response)
log.debug("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
method,
redirect_location,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw,
)
# Check if we should retry the HTTP response.
has_retry_after = bool(response.getheader("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep(response)
log.debug("Retry: %s", url)
return self.urlopen(
method,
url,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw,
)
return response
class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    ConnectionCls = HTTPSConnection

    def __init__(
        self,
        host: str,
        port: Optional[int] = None,
        timeout: Optional[_TYPE_TIMEOUT] = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: Optional[Mapping[str, str]] = None,
        retries: Optional[Union[Retry, bool, int]] = None,
        _proxy: Optional[Url] = None,
        _proxy_headers: Optional[Mapping[str, str]] = None,
        key_file: Optional[str] = None,
        cert_file: Optional[str] = None,
        cert_reqs: Optional[Union[int, str]] = None,
        key_password: Optional[str] = None,
        ca_certs: Optional[str] = None,
        ssl_version: Optional[Union[int, str]] = None,
        ssl_minimum_version: Optional["ssl.TLSVersion"] = None,
        ssl_maximum_version: Optional["ssl.TLSVersion"] = None,
        assert_hostname: Optional[Union[str, "Literal[False]"]] = None,
        assert_fingerprint: Optional[str] = None,
        ca_cert_dir: Optional[str] = None,
        **conn_kw: Any,
    ) -> None:
        # Generic pooling behaviour (queueing, retries, proxy bookkeeping) is
        # handled entirely by the parent class; only TLS-specific settings are
        # stored here and later pushed onto each connection in _prepare_conn().
        super().__init__(
            host,
            port,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw,
        )
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn: HTTPSConnection) -> HTTPConnection:
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """
        # Only verified connections understand certificate options; plain
        # HTTPSConnection instances are returned untouched.
        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(
                key_file=self.key_file,
                key_password=self.key_password,
                cert_file=self.cert_file,
                cert_reqs=self.cert_reqs,
                ca_certs=self.ca_certs,
                ca_cert_dir=self.ca_cert_dir,
                assert_hostname=self.assert_hostname,
                assert_fingerprint=self.assert_fingerprint,
            )
            conn.ssl_version = self.ssl_version
            conn.ssl_minimum_version = self.ssl_minimum_version
            conn.ssl_maximum_version = self.ssl_maximum_version
        return conn

    def _prepare_proxy(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """
        Establishes a tunnel connection through HTTP CONNECT.

        Tunnel connection is established early because otherwise httplib would
        improperly set Host: header to proxy's IP:port.
        """
        conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
        # TLS-in-TLS: when the proxy itself speaks HTTPS, the tunneled
        # connection must wrap a second TLS layer inside the proxy's one.
        if self.proxy and self.proxy.scheme == "https":
            conn.tls_in_tls_required = True
        conn.connect()

    def _new_conn(self) -> HTTPConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        # ConnectionCls degrades to DummyConnection when the ssl module could
        # not be imported; fail loudly rather than attempting plaintext.
        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:  # type: ignore[comparison-overlap]
            raise SSLError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        # When proxied, the TCP connection is made to the proxy, not the
        # origin; the origin host is applied later via the CONNECT tunnel.
        actual_host: str = self.host
        actual_port = self.port
        if self.proxy is not None and self.proxy.host is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        conn = self.ConnectionCls(
            host=actual_host,
            port=actual_port,
            timeout=self.timeout.connect_timeout,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            **self.conn_kw,
        )
        return self._prepare_conn(conn)

    def _validate_conn(self, conn: HTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        super()._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not conn.sock:
            conn.connect()

        # Warn (once per request) when certificate verification is disabled.
        if not conn.is_verified:
            warnings.warn(
                (
                    f"Unverified HTTPS request is being made to host '{conn.host}'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                    "#tls-warnings"
                ),
                InsecureRequestWarning,
            )
# Default port for each supported scheme, used when a URL omits the port.
port_by_scheme = {"http": 80, "https": 443}
def parse_url(url: str) -> Url:
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.
    This parser is RFC 3986 compliant.

    The parser logic and helper functions are based heavily on
    work done in the ``rfc3986`` module.

    :param str url: URL to parse into a :class:`.Url` namedtuple.

    Partly backwards-compatible with :mod:`urlparse`.

    Example:

    .. code-block:: python

        import urllib3

        print(urllib3.util.parse_url('http://google.com/mail/'))
        # Url(scheme='http', host='google.com', port=None, path='/mail/', ...)

        print(urllib3.util.parse_url('google.com:80'))
        # Url(scheme=None, host='google.com', port=80, path=None, ...)

        print(urllib3.util.parse_url('/foo?bar'))
        # Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)

    :raises LocationParseError: if the url cannot be parsed or has an
        out-of-range port.
    """
    if not url:
        # Empty
        return Url()

    # Keep the untouched input for error messages; ``url`` itself may be
    # rewritten below.
    source_url = url
    # A scheme-less input like "google.com:80" would otherwise be parsed as
    # scheme "google.com"; prefixing "//" forces it into the authority slot.
    if not _SCHEME_RE.search(url):
        url = "//" + url

    scheme: Optional[str]
    authority: Optional[str]
    auth: Optional[str]
    host: Optional[str]
    port: Optional[str]
    port_int: Optional[int]
    path: Optional[str]
    query: Optional[str]
    fragment: Optional[str]

    try:
        # NOTE: a failed regex match returns None, so the .groups()/.match()
        # chains below deliberately rely on AttributeError being raised and
        # caught by the except clause as "could not parse".
        scheme, authority, path, query, fragment = _URI_RE.match(url).groups()  # type: ignore[union-attr]
        # Percent-encoding normalization is only applied to schemes we know
        # (http/https); unknown schemes are passed through untouched.
        normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES

        if scheme:
            scheme = scheme.lower()

        if authority:
            # rpartition, not partition: the userinfo itself may contain "@".
            auth, _, host_port = authority.rpartition("@")
            auth = auth or None
            host, port = _HOST_PORT_RE.match(host_port).groups()  # type: ignore[union-attr]
            if auth and normalize_uri:
                auth = _encode_invalid_chars(auth, _USERINFO_CHARS)
            if port == "":
                # "host:" with no digits means no port was given.
                port = None
        else:
            auth, host, port = None, None, None

        if port is not None:
            port_int = int(port)
            # Ports outside the 16-bit range are a parse error, not a clamp.
            if not (0 <= port_int <= 65535):
                raise LocationParseError(url)
        else:
            port_int = None

        host = _normalize_host(host, scheme)

        if normalize_uri and path:
            path = _remove_path_dot_segments(path)
            path = _encode_invalid_chars(path, _PATH_CHARS)
        if normalize_uri and query:
            query = _encode_invalid_chars(query, _QUERY_CHARS)
        if normalize_uri and fragment:
            fragment = _encode_invalid_chars(fragment, _FRAGMENT_CHARS)

    except (ValueError, AttributeError) as e:
        raise LocationParseError(source_url) from e

    # For the sake of backwards compatibility we put empty
    # string values for path if there are any defined values
    # beyond the path in the URL.
    # TODO: Remove this when we break backwards compatibility.
    if not path:
        if query is not None or fragment is not None:
            path = ""
        else:
            path = None

    return Url(
        scheme=scheme,
        auth=auth,
        host=host,
        port=port_int,
        path=path,
        query=query,
        fragment=fragment,
    )
The provided code snippet includes necessary dependencies for implementing the `connection_from_url` function. Write a Python function `def connection_from_url(url: str, **kw: Any) -> HTTPConnectionPool` to solve the following problem:
Given a url, return an :class:`.ConnectionPool` instance of its host. This is a shortcut for not having to parse out the scheme, host, and port of the url before creating an :class:`.ConnectionPool` instance. :param url: Absolute URL string that must include the scheme. Port is optional. :param \\**kw: Passes additional parameters to the constructor of the appropriate :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/')
Here is the function:
def connection_from_url(url: str, **kw: Any) -> HTTPConnectionPool:
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    parsed = parse_url(url)
    scheme = parsed.scheme or "http"
    # Fall back to the scheme's well-known port when the URL omits one.
    port = parsed.port or port_by_scheme.get(scheme, 80)
    pool_cls = HTTPSConnectionPool if scheme == "https" else HTTPConnectionPool
    return pool_cls(parsed.host, port=port, **kw)  # type: ignore[arg-type]
157,744 | import errno
import logging
import queue
import sys
import warnings
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
from types import TracebackType
from typing import TYPE_CHECKING, Any, Mapping, Optional, Type, TypeVar, Union, overload
from ._request_methods import RequestMethods
from .connection import (
_TYPE_BODY,
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
ProxyConfig,
VerifiedHTTPSConnection,
_wrap_proxy_error,
)
from .connection import port_by_scheme as port_by_scheme
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
FullPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
)
from .response import BaseHTTPResponse, HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
from .util.util import to_str
def _normalize_host(host: None, scheme: Optional[str]) -> None:
    # Typing-only stub: a ``None`` host normalizes to ``None``.
    # NOTE(review): presumably an ``@overload`` signature in the original
    # module -- the decorator is not visible in this chunk; confirm there.
    ...
157,745 | import errno
import logging
import queue
import sys
import warnings
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
from types import TracebackType
from typing import TYPE_CHECKING, Any, Mapping, Optional, Type, TypeVar, Union, overload
from ._request_methods import RequestMethods
from .connection import (
_TYPE_BODY,
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
ProxyConfig,
VerifiedHTTPSConnection,
_wrap_proxy_error,
)
from .connection import port_by_scheme as port_by_scheme
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
FullPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
)
from .response import BaseHTTPResponse, HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
from .util.util import to_str
def _normalize_host(host: str, scheme: Optional[str]) -> str:
    # Typing-only stub: a ``str`` host normalizes to a ``str``.
    # NOTE(review): presumably an ``@overload`` signature in the original
    # module -- the decorator is not visible in this chunk; confirm there.
    ...
157,746 | import errno
import logging
import queue
import sys
import warnings
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
from types import TracebackType
from typing import TYPE_CHECKING, Any, Mapping, Optional, Type, TypeVar, Union, overload
from ._request_methods import RequestMethods
from .connection import (
_TYPE_BODY,
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
ProxyConfig,
VerifiedHTTPSConnection,
_wrap_proxy_error,
)
from .connection import port_by_scheme as port_by_scheme
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
FullPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
)
from .response import BaseHTTPResponse, HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
from .util.util import to_str
The provided code snippet includes necessary dependencies for implementing the `_normalize_host` function. Write a Python function `def _normalize_host(host: Optional[str], scheme: Optional[str]) -> Optional[str]` to solve the following problem:
Normalize hosts for comparisons and use with sockets.
Here is the function:
def _normalize_host(host: Optional[str], scheme: Optional[str]) -> Optional[str]:
    """Normalize *host* for comparisons and for use with sockets."""
    normalized = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    if normalized and normalized[0] == "[" and normalized[-1] == "]":
        return normalized[1:-1]
    return normalized
157,747 | import email.utils
import mimetypes
from typing import (
Callable,
Dict,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
Union,
cast,
)
The provided code snippet includes necessary dependencies for implementing the `guess_content_type` function. Write a Python function `def guess_content_type( filename: Optional[str], default: str = "application/octet-stream" ) -> str` to solve the following problem:
Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`.
Here is the function:
def guess_content_type(
    filename: Optional[str], default: str = "application/octet-stream"
) -> str:
    """
    Guess the "Content-Type" of a file from its name.

    :param filename:
        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
    :param default:
        If no "Content-Type" can be guessed, default to `default`.
    """
    if not filename:
        return default
    guessed, _ = mimetypes.guess_type(filename)
    return guessed or default
157,748 | import email.utils
import mimetypes
from typing import (
Callable,
Dict,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
Union,
cast,
)
import warnings

# Register default filters for urllib3's security-related warnings.
# NOTE(review): ``exceptions`` is not defined in this extracted chunk --
# presumably ``urllib3.exceptions``; confirm against the original module.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
The provided code snippet includes necessary dependencies for implementing the `format_header_param_rfc2231` function. Write a Python function `def format_header_param_rfc2231(name: str, value: Union[str, bytes]) -> str` to solve the following problem:
Helper function to format and quote a single header parameter using the strategy defined in RFC 2231. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as ``bytes`` or `str``. :returns: An RFC-2231-formatted unicode string. .. deprecated:: 2.0.0 Will be removed in urllib3 v3.0.0. This is not valid for ``multipart/form-data`` header parameters.
Here is the function:
def format_header_param_rfc2231(name: str, value: Union[str, bytes]) -> str:
    """
    Format and quote a single header parameter using the strategy
    defined in RFC 2231.

    Useful for header parameters which might contain non-ASCII values,
    like file names, following `RFC 2388 Section 4.4
    <https://tools.ietf.org/html/rfc2388#section-4.4>`_.

    :param name:
        The parameter name; expected to be ASCII only.
    :param value:
        The parameter value, as ``bytes`` or ``str``.
    :returns:
        An RFC-2231-formatted unicode string.

    .. deprecated:: 2.0.0
        Will be removed in urllib3 v3.0.0. This is not valid for
        ``multipart/form-data`` header parameters.
    """
    import warnings

    warnings.warn(
        "'format_header_param_rfc2231' is deprecated and will be "
        "removed in urllib3 v3.0.0. This is not valid for "
        "multipart/form-data header parameters.",
        DeprecationWarning,
        stacklevel=2,
    )

    text = value.decode("utf-8") if isinstance(value, bytes) else value

    # Plain quoting is allowed only when the value has none of the characters
    # that would break a quoted-string AND it round-trips through ASCII.
    if not any(ch in text for ch in '"\\\r\n'):
        quoted = f'{name}="{text}"'
        try:
            quoted.encode("ascii")
            return quoted
        except (UnicodeEncodeError, UnicodeDecodeError):
            pass

    # Otherwise fall back to the RFC 2231 extended-parameter syntax.
    encoded = email.utils.encode_rfc2231(text, "utf-8")
    return f"{name}*={encoded}"
157,749 | import email.utils
import mimetypes
from typing import (
Callable,
Dict,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
Union,
cast,
)
# Acceptable types for a form-field value: text, or UTF-8-encoded bytes.
_TYPE_FIELD_VALUE = Union[str, bytes]
def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    Format and quote a single multipart header parameter.

    This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching
    the behavior of current browser and curl versions. Values are
    assumed to be UTF-8.

    .. _WHATWG HTML Standard:
        https://html.spec.whatwg.org/multipage/
        form-control-infrastructure.html#multipart-form-data

    :param name:
        The parameter name, an ASCII-only ``str``.
    :param value:
        The parameter value, a ``str`` or UTF-8 encoded ``bytes``.
    :returns:
        A string ``name="value"`` with the escaped value.
    """
    if isinstance(value, bytes):
        value = value.decode("utf-8")

    # The WHATWG algorithm percent-encodes exactly three characters --
    # newline, carriage return, and the double quote; everything else
    # (including control characters) passes through untouched.
    escaped = value.translate(
        {ord("\n"): "%0A", ord("\r"): "%0D", ord('"'): "%22"}
    )
    return f'{name}="{escaped}"'
import warnings

# Register default filters for urllib3's security-related warnings.
# NOTE(review): ``exceptions`` is not defined in this extracted chunk --
# presumably ``urllib3.exceptions``; confirm against the original module.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
The provided code snippet includes necessary dependencies for implementing the `format_header_param_html5` function. Write a Python function `def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str` to solve the following problem:
.. deprecated:: 2.0.0 Renamed to :func:`format_multipart_header_param`. Will be removed in urllib3 v3.0.0.
Here is the function:
def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    Deprecated alias for :func:`format_multipart_header_param`.

    .. deprecated:: 2.0.0
        Renamed to :func:`format_multipart_header_param`. Will be
        removed in urllib3 v3.0.0.
    """
    import warnings

    # Emit the rename notice, then delegate unchanged.
    warnings.warn(
        "'format_header_param_html5' has been renamed to "
        "'format_multipart_header_param'. The old name will be "
        "removed in urllib3 v3.0.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    return format_multipart_header_param(name, value)
157,750 | import email.utils
import mimetypes
from typing import (
Callable,
Dict,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
Union,
cast,
)
# Acceptable types for a form-field value: text, or UTF-8-encoded bytes.
_TYPE_FIELD_VALUE = Union[str, bytes]
def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    Format and quote a single multipart header parameter.

    Matches the `WHATWG HTML Standard`_ (as of 2021/06/10) and the
    behavior of current browsers and curl: the value is taken to be
    UTF-8, and only ``\\n``, ``\\r``, and ``"`` are percent-encoded.

    .. _WHATWG HTML Standard:
        https://html.spec.whatwg.org/multipage/
        form-control-infrastructure.html#multipart-form-data

    :param name: The parameter name, an ASCII-only ``str``.
    :param value: The parameter value, ``str`` or UTF-8 encoded ``bytes``.
    :returns: A string ``name="value"`` with the escaped value.
    """
    text = value.decode("utf-8") if isinstance(value, bytes) else value

    # The replacements never produce characters that a later replacement
    # would touch, so applying them sequentially is equivalent to a single
    # simultaneous translation pass.
    text = text.replace("\n", "%0A").replace("\r", "%0D").replace('"', "%22")
    return f'{name}="{text}"'
import warnings

# Register default filters for urllib3's security-related warnings.
# NOTE(review): ``exceptions`` is not defined in this extracted chunk --
# presumably ``urllib3.exceptions``; confirm against the original module.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
The provided code snippet includes necessary dependencies for implementing the `format_header_param` function. Write a Python function `def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str` to solve the following problem:
.. deprecated:: 2.0.0 Renamed to :func:`format_multipart_header_param`. Will be removed in urllib3 v3.0.0.
Here is the function:
def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    Deprecated alias for :func:`format_multipart_header_param`.

    .. deprecated:: 2.0.0
        Renamed to :func:`format_multipart_header_param`. Will be
        removed in urllib3 v3.0.0.
    """
    import warnings

    # Emit the rename notice, then delegate unchanged.
    warnings.warn(
        "'format_header_param' has been renamed to "
        "'format_multipart_header_param'. The old name will be "
        "removed in urllib3 v3.0.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    return format_multipart_header_param(name, value)
157,751 | import io
import json as _json
import logging
import zlib
from contextlib import contextmanager
from http.client import HTTPMessage as _HttplibHTTPMessage
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
from typing import (
TYPE_CHECKING,
Any,
Generator,
Iterator,
List,
Mapping,
Optional,
Tuple,
Type,
Union,
)
from ._collections import HTTPHeaderDict
from .connection import _TYPE_BODY, BaseSSLError, HTTPConnection, HTTPException
from .exceptions import (
BodyNotHttplibCompatible,
DecodeError,
HTTPError,
IncompleteRead,
InvalidChunkLength,
InvalidHeader,
ProtocolError,
ReadTimeoutError,
ResponseNotChunked,
SSLError,
)
from .util.response import is_fp_closed, is_response_to_head
from .util.retry import Retry
class ContentDecoder:
    """Abstract interface for Content-Encoding decoders (gzip, deflate, ...)."""

    def decompress(self, data: bytes) -> bytes:
        # Subclasses must decode one chunk of the encoded stream.
        raise NotImplementedError()

    def flush(self) -> bytes:
        # Subclasses must emit any buffered trailing output.
        raise NotImplementedError()
class DeflateDecoder(ContentDecoder):
    """Decoder for ``Content-Encoding: deflate`` bodies.

    Starts out assuming a zlib-wrapped deflate stream and transparently
    falls back to raw deflate if the first decompression attempt fails
    (some servers send deflate data without the zlib header).
    """

    def __init__(self) -> None:
        self._first_try = True
        # Everything seen so far is buffered until the stream format is
        # known, so it can be replayed through a raw-deflate object.
        self._data = b""
        self._obj = zlib.decompressobj()

    def decompress(self, data: bytes) -> bytes:
        if not data:
            return data

        if not self._first_try:
            # Format already settled; decode directly.
            return self._obj.decompress(data)

        self._data += data
        try:
            decompressed = self._obj.decompress(data)
        except zlib.error:
            # zlib header was bogus: switch to raw deflate and replay the
            # accumulated input through the new decompressor.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None  # type: ignore[assignment]

        if decompressed:
            # First real output: the zlib guess was right, stop buffering.
            self._first_try = False
            self._data = None  # type: ignore[assignment]
        return decompressed

    def flush(self) -> bytes:
        return self._obj.flush()
class GzipDecoder(ContentDecoder):
    """Decoder for gzip-encoded bodies, tolerant of multi-member streams."""

    def __init__(self) -> None:
        # wbits = 16 + MAX_WBITS selects gzip-wrapped deflate decoding.
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER

    def decompress(self, data: bytes) -> bytes:
        ret = bytearray()
        if self._state == GzipDecoderState.SWALLOW_DATA or not data:
            # Either a previous error put us in discard mode, or there is
            # nothing to decode.
            return bytes(ret)
        while True:
            try:
                ret += self._obj.decompress(data)
            except zlib.error:
                previous_state = self._state
                # Ignore data after the first error
                self._state = GzipDecoderState.SWALLOW_DATA
                if previous_state == GzipDecoderState.OTHER_MEMBERS:
                    # Allow trailing garbage acceptable in other gzip clients
                    return bytes(ret)
                raise
            data = self._obj.unused_data
            if not data:
                return bytes(ret)
            # Leftover bytes mean another gzip member follows: start a
            # fresh decompressor for it.
            self._state = GzipDecoderState.OTHER_MEMBERS
            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def flush(self) -> bytes:
        return self._obj.flush()
# Only define BrotliDecoder when a brotli implementation was importable.
if brotli is not None:

    class BrotliDecoder(ContentDecoder):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self) -> None:
            self._obj = brotli.Decompressor()
            # Bind the right per-chunk method directly onto the instance.
            if hasattr(self._obj, "decompress"):
                # 'brotlipy' API
                setattr(self, "decompress", self._obj.decompress)
            else:
                # 'Brotli' API
                setattr(self, "decompress", self._obj.process)

        def flush(self) -> bytes:
            # Only 'brotlipy' exposes flush(); 'Brotli' has nothing to flush.
            if hasattr(self._obj, "flush"):
                return self._obj.flush()  # type: ignore[no-any-return]
            return b""
class MultiDecoder(ContentDecoder):
    """
    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    def __init__(self, modes: str) -> None:
        # One decoder per comma-separated coding token, in header order.
        decoders = []
        for mode in modes.split(","):
            decoders.append(_get_decoder(mode.strip()))
        self._decoders = decoders

    def flush(self) -> bytes:
        # The first-listed coding was applied first, so it is undone last
        # and is the one holding any trailing output.
        return self._decoders[0].flush()

    def decompress(self, data: bytes) -> bytes:
        # Undo the codings in reverse of the order they were applied.
        for decoder in reversed(self._decoders):
            data = decoder.decompress(data)
        return data
def _get_decoder(mode: str) -> ContentDecoder:
    """Return a ContentDecoder for a Content-Encoding token (or token list)."""
    # A comma means multiple codings were stacked; handle that first.
    if "," in mode:
        return MultiDecoder(mode)
    elif mode == "gzip":
        return GzipDecoder()
    elif brotli is not None and mode == "br":
        return BrotliDecoder()
    else:
        # Anything else is treated as deflate.
        return DeflateDecoder()
157,752 | import functools
import logging
import warnings
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
Dict,
FrozenSet,
Mapping,
NamedTuple,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from urllib.parse import urljoin
from ._collections import RecentlyUsedContainer
from ._request_methods import RequestMethods
from .connection import ProxyConfig
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .exceptions import (
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
URLSchemeUnknown,
)
from .response import BaseHTTPResponse
from .util.connection import _TYPE_SOCKET_OPTIONS
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import Url, parse_url
_DEFAULT_BLOCKSIZE = 16384
class PoolKey(NamedTuple):
    """
    All known keyword arguments that could be provided to the pool manager, its
    pools, or the underlying connections.

    All custom key schemes should include the fields in this key at a minimum.
    """

    # Target location; scheme and host are lowercased by
    # _default_key_normalizer before a key is constructed.
    key_scheme: str
    key_host: str
    key_port: Optional[int]
    key_timeout: Optional[Union[Timeout, float, int]]
    key_retries: Optional[Union[Retry, int]]
    key_block: Optional[bool]
    key_source_address: Optional[Tuple[str, int]]
    # TLS / certificate configuration
    key_key_file: Optional[str]
    key_key_password: Optional[str]
    key_cert_file: Optional[str]
    key_cert_reqs: Optional[str]
    key_ca_certs: Optional[str]
    key_ssl_version: Optional[Union[int, str]]
    key_ssl_minimum_version: Optional["ssl.TLSVersion"]
    key_ssl_maximum_version: Optional["ssl.TLSVersion"]
    key_ca_cert_dir: Optional[str]
    key_ssl_context: Optional["ssl.SSLContext"]
    key_maxsize: Optional[int]
    # Dict-valued inputs are converted to frozensets of items so the key
    # stays hashable (see _default_key_normalizer).
    key_headers: Optional[FrozenSet[Tuple[str, str]]]
    key__proxy: Optional[Url]
    key__proxy_headers: Optional[FrozenSet[Tuple[str, str]]]
    key__proxy_config: Optional[ProxyConfig]
    key_socket_options: Optional[_TYPE_SOCKET_OPTIONS]
    key__socks_options: Optional[FrozenSet[Tuple[str, str]]]
    key_assert_hostname: Optional[Union[bool, str]]
    key_assert_fingerprint: Optional[str]
    key_server_hostname: Optional[str]
    # Defaults to _DEFAULT_BLOCKSIZE when not supplied.
    key_blocksize: Optional[int]
The provided code snippet includes necessary dependencies for implementing the `_default_key_normalizer` function. Write a Python function `def _default_key_normalizer( key_class: Type[PoolKey], request_context: Dict[str, Any] ) -> PoolKey` to solve the following problem:
Create a pool key out of a request context dictionary. According to RFC 3986, both the scheme and host are case-insensitive. Therefore, this function normalizes both before constructing the pool key for an HTTPS request. If you wish to change this behaviour, provide alternate callables to ``key_fn_by_scheme``. :param key_class: The class to use when constructing the key. This should be a namedtuple with the ``scheme`` and ``host`` keys at a minimum. :type key_class: namedtuple :param request_context: A dictionary-like object that contain the context for a request. :type request_context: dict :return: A namedtuple that can be used as a connection pool key. :rtype: PoolKey
Here is the function:
def _default_key_normalizer(
    key_class: Type[PoolKey], request_context: Dict[str, Any]
) -> PoolKey:
    """
    Create a pool key out of a request context dictionary.

    According to RFC 3986, both the scheme and host are case-insensitive.
    Therefore, this function normalizes both before constructing the pool
    key for an HTTPS request. If you wish to change this behaviour, provide
    alternate callables to ``key_fn_by_scheme``.

    :param key_class:
        The class to use when constructing the key. This should be a namedtuple
        with the ``scheme`` and ``host`` keys at a minimum.
    :type key_class: namedtuple
    :param request_context:
        A dictionary-like object that contain the context for a request.
    :type request_context: dict

    :return: A namedtuple that can be used as a connection pool key.
    :rtype: PoolKey
    """
    # Work on a copy; the caller's dict must not be mutated.
    ctx = request_context.copy()

    # Scheme and host are case-insensitive per RFC 3986.
    ctx["scheme"] = ctx["scheme"].lower()
    ctx["host"] = ctx["host"].lower()

    # Dict-valued entries must become hashable frozensets of items.
    for dict_key in ("headers", "_proxy_headers", "_socks_options"):
        if ctx.get(dict_key) is not None:
            ctx[dict_key] = frozenset(ctx[dict_key].items())

    # socket_options may arrive as a list; a tuple is hashable.
    if ctx.get("socket_options") is not None:
        ctx["socket_options"] = tuple(ctx["socket_options"])

    # namedtuple fields cannot start with "_", so prefix every key with
    # "key_" to match the PoolKey field names.
    for original_key in list(ctx):
        ctx["key_" + original_key] = ctx.pop(original_key)

    # Any field the context did not supply defaults to None.
    for field_name in key_class._fields:
        ctx.setdefault(field_name, None)

    # A missing/None blocksize falls back to the module default.
    if ctx.get("key_blocksize") is None:
        ctx["key_blocksize"] = _DEFAULT_BLOCKSIZE

    return key_class(**ctx)
157,753 | import functools
import logging
import warnings
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
Dict,
FrozenSet,
Mapping,
NamedTuple,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from urllib.parse import urljoin
from ._collections import RecentlyUsedContainer
from ._request_methods import RequestMethods
from .connection import ProxyConfig
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .exceptions import (
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
URLSchemeUnknown,
)
from .response import BaseHTTPResponse
from .util.connection import _TYPE_SOCKET_OPTIONS
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import Url, parse_url
class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    :param proxy_ssl_context:
        The proxy SSL context is used to establish the TLS connection to the
        proxy when using HTTPS proxies.

    :param use_forwarding_for_https:
        (Defaults to False) If set to True will forward requests to the HTTPS
        proxy to be made on behalf of the client instead of creating a TLS
        tunnel via the CONNECT method. **Enabling this flag means that request
        and response headers and content will be visible from the HTTPS proxy**
        whereas tunneling keeps request and response headers and content
        private. IP address, target hostname, SNI, and port are always visible
        to an HTTPS proxy even when this flag is disabled.

    Example:

    .. code-block:: python

        import urllib3

        proxy = urllib3.ProxyManager("https://localhost:3128/")

        resp1 = proxy.request("GET", "https://google.com/")
        resp2 = proxy.request("GET", "https://httpbin.org/")

        print(len(proxy.pools))
        # 1

        resp3 = proxy.request("GET", "https://httpbin.org/")
        resp4 = proxy.request("GET", "https://twitter.com/")

        print(len(proxy.pools))
        # 3
    """

    def __init__(
        self,
        proxy_url: str,
        num_pools: int = 10,
        headers: Optional[Mapping[str, str]] = None,
        proxy_headers: Optional[Mapping[str, str]] = None,
        proxy_ssl_context: Optional["ssl.SSLContext"] = None,
        use_forwarding_for_https: bool = False,
        **connection_pool_kw: Any,
    ) -> None:
        # Also accept an HTTPConnectionPool instance and rebuild its URL.
        if isinstance(proxy_url, HTTPConnectionPool):
            str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}"
        else:
            str_proxy_url = proxy_url
        proxy = parse_url(str_proxy_url)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        if not proxy.port:
            # Fill in the scheme's default port when none was given.
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        self.proxy_ssl_context = proxy_ssl_context
        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)

        # Propagate the proxy settings into every pool this manager creates.
        connection_pool_kw["_proxy"] = self.proxy
        connection_pool_kw["_proxy_headers"] = self.proxy_headers
        connection_pool_kw["_proxy_config"] = self.proxy_config

        super().__init__(num_pools, headers, **connection_pool_kw)

    def connection_from_host(
        self,
        host: Optional[str],
        port: Optional[int] = None,
        scheme: Optional[str] = "http",
        pool_kwargs: Optional[Dict[str, Any]] = None,
    ) -> HTTPConnectionPool:
        # HTTPS requests get a pool keyed on the *target* host, while plain
        # HTTP requests are sent to the proxy itself and therefore share a
        # pool keyed on the proxy address.
        if scheme == "https":
            return super().connection_from_host(
                host, port, scheme, pool_kwargs=pool_kwargs
            )

        return super().connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs  # type: ignore[union-attr]
        )

    def _set_proxy_headers(
        self, url: str, headers: Optional[Mapping[str, str]] = None
    ) -> Mapping[str, str]:
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {"Accept": "*/*"}

        netloc = parse_url(url).netloc
        if netloc:
            headers_["Host"] = netloc

        # User-supplied headers win over the defaults above.
        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(  # type: ignore[override]
        self, method: str, url: str, redirect: bool = True, **kw: Any
    ) -> BaseHTTPResponse:
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)
        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
            # For connections using HTTP CONNECT, httplib sets the necessary
            # headers on the CONNECT to the proxy. If we're not using CONNECT,
            # we'll definitely need to set 'Host' at the very least.
            headers = kw.get("headers", self.headers)
            kw["headers"] = self._set_proxy_headers(url, headers)

        return super().urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url: str, **kw: Any) -> ProxyManager:
    """Create a :class:`ProxyManager` for *url*, forwarding extra kwargs."""
    return ProxyManager(proxy_url=url, **kw)
157,754 | import datetime
import logging
import os
import re
import socket
import warnings
from copy import copy
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException as HTTPException
from socket import timeout as SocketTimeout
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
Iterable,
Mapping,
NamedTuple,
Optional,
Tuple,
Union,
cast,
)
from .util.proxy import create_proxy_ssl_context
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_bytes, to_str
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
NameResolutionError,
NewConnectionError,
ProxyError,
SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.ssl_ import (
_TYPE_PEER_CERT_RET,
assert_fingerprint,
create_urllib3_context,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
log = logging.getLogger(__name__)
_TYPE_PEER_CERT_RET = Union[_TYPE_PEER_CERT_RET_DICT, bytes, None]
class CertificateError(ValueError):
    """Raised by match_hostname when a certificate does not match the hostname."""

    pass
def match_hostname(cert: _TYPE_PEER_CERT_RET, hostname: str) -> None:
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError(
            "empty or no certificate, match_hostname needs a "
            "SSL socket or SSL context with either "
            "CERT_OPTIONAL or CERT_REQUIRED"
        )
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        host_ip = ipaddress.ip_address(hostname.strip("[]"))
    except ValueError:
        # Not an IP address (common case)
        host_ip = None
    # Collect every subjectAltName candidate so a failure can report them.
    dnsnames = []
    san = cert.get("subjectAltName", ())  # type: ignore[union-attr]
    for key, value in san:  # type: ignore[misc]
        if key == "DNS":
            # _dnsname_match is defined elsewhere in this module; presumably
            # it implements the RFC 6125 wildcard rules — confirm.
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == "IP Address":
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    # No SAN entry matched: raise with the candidates we saw.
    if len(dnsnames) > 1:
        raise CertificateError(
            "hostname %r "
            "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
        )
    elif len(dnsnames) == 1:
        raise CertificateError(f"hostname {hostname!r} doesn't match {dnsnames[0]!r}")
    else:
        raise CertificateError("no appropriate subjectAltName fields were found")
def _match_hostname(cert: _TYPE_PEER_CERT_RET, asserted_hostname: str) -> None:
    """Run match_hostname(), logging and annotating any mismatch.

    On failure the offending certificate is attached to the exception as
    ``_peer_cert`` before re-raising, so callers that catch it can inspect
    the certificate.
    """
    try:
        match_hostname(cert, asserted_hostname)
    except CertificateError as err:
        log.warning(
            "Certificate did not match expected hostname: %s. Certificate: %s",
            asserted_hostname,
            cert,
        )
        # Expose the certificate on the exception for inspecting callers.
        err._peer_cert = cert  # type: ignore[attr-defined]
        raise
157,755 | import datetime
import logging
import os
import re
import socket
import warnings
from copy import copy
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException as HTTPException
from socket import timeout as SocketTimeout
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
Iterable,
Mapping,
NamedTuple,
Optional,
Tuple,
Union,
cast,
)
from .util.proxy import create_proxy_ssl_context
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_bytes, to_str
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
NameResolutionError,
NewConnectionError,
ProxyError,
SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.ssl_ import (
_TYPE_PEER_CERT_RET,
assert_fingerprint,
create_urllib3_context,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
class ProxyError(HTTPError):
    """Raised when the connection to a proxy fails."""

    # The original error is also available as __cause__.
    original_error: Exception

    def __init__(self, message: str, error: Exception) -> None:
        super().__init__(message, error)
        # Keep a direct reference to the underlying error for callers.
        self.original_error = error
def _wrap_proxy_error(err: Exception) -> ProxyError:
    """Wrap *err* in a :class:`ProxyError`, hinting at HTTP-only proxies.

    An SSL "wrong version number" error is a strong sign the proxy speaks
    plain HTTP while we attempted TLS, so the message gains an extra hint
    in that case.
    """
    # Lowercase and strip punctuation so the phrase match is robust.
    normalized = " ".join(re.split("[^a-z]", str(err).lower()))
    message = "Unable to connect to proxy"
    if "wrong version number" in normalized:
        message += (
            ". Your proxy appears to only use HTTP and not HTTPS, "
            "did you intend to set a proxy URL using HTTPS instead of HTTP?"
        )
    wrapped = ProxyError(message, err)
    wrapped.__cause__ = err
    return wrapped
157,756 | import datetime
import logging
import os
import re
import socket
import warnings
from copy import copy
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException as HTTPException
from socket import timeout as SocketTimeout
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
Iterable,
Mapping,
NamedTuple,
Optional,
Tuple,
Union,
cast,
)
from .util.proxy import create_proxy_ssl_context
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_bytes, to_str
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
NameResolutionError,
NewConnectionError,
ProxyError,
SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.ssl_ import (
_TYPE_PEER_CERT_RET,
assert_fingerprint,
create_urllib3_context,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
__version__ = "2.0.0.dev0"
def _get_default_user_agent() -> str:
    """Return the default User-Agent string for this urllib3 version."""
    return "python-urllib3/" + __version__
157,757 | from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
    x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
    """Coerce *x* to ``bytes``.

    ``bytes`` input is returned unchanged; ``str`` input is encoded with
    *encoding*/*errors* (defaulting to UTF-8/strict when either is given,
    or the platform-default ``str.encode()`` when neither is).

    :raises TypeError: if *x* is neither ``str`` nor ``bytes``.
    """
    if isinstance(x, bytes):
        return x
    if not isinstance(x, str):
        raise TypeError(f"not expecting type {type(x).__name__}")
    # Truthiness check mirrors the historical behavior: an explicit empty
    # encoding/errors still falls through to the bare encode() call.
    if not (encoding or errors):
        return x.encode()
    return x.encode(encoding or "utf-8", errors=errors or "strict")
157,758 | from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def reraise(
    tp: Optional[Type[BaseException]],
    value: BaseException,
    tb: Optional[TracebackType] = None,
) -> NoReturn:
    """Re-raise *value*, attaching traceback *tb* unless it is already set.

    :param tp: Exception type; accepted for API compatibility but unused here.
    :param value: The exception instance to raise.
    :param tb: Optional traceback to attach via ``with_traceback``.
    """
    try:
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    finally:
        # Drop local references to avoid a frame <-> traceback reference cycle.
        value = None  # type: ignore[assignment]
        tb = None
157,759 | import re
from typing import Container, NamedTuple, Optional, overload
from ..exceptions import LocationParseError
from .util import to_str
# Splits a request target into (path, optional query); the fragment is
# matched but not captured, so it is dropped.
_TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")

# Characters allowed unescaped in each URL component.
# NOTE(review): _USERINFO_CHARS is defined elsewhere in this module.
_PATH_CHARS = _USERINFO_CHARS | {"@", "/"}
_QUERY_CHARS = _FRAGMENT_CHARS = _PATH_CHARS | {"?"}
def _encode_invalid_chars(
component: str, allowed_chars: Container[str]
) -> str: # Abstract
...
def _encode_invalid_chars(
component: None, allowed_chars: Container[str]
) -> None: # Abstract
...
def _encode_invalid_chars(
    component: Optional[str], allowed_chars: Container[str]
) -> Optional[str]:
    """Percent-encodes a URI component without reapplying
    onto an already percent-encoded component.

    :param component: The URI component to encode; ``None`` is returned as-is.
    :param allowed_chars: Characters permitted to appear unescaped.
    """
    if component is None:
        return component

    component = to_str(component)

    # Normalize existing percent-encoded bytes.
    # Try to see if the component we're encoding is already percent-encoded
    # so we can skip all '%' characters but still encode all others.
    # NOTE(review): _PERCENT_RE is defined elsewhere in this module;
    # presumably it matches "%XX" escape sequences — confirm.
    component, percent_encodings = _PERCENT_RE.subn(
        lambda match: match.group(0).upper(), component
    )

    uri_bytes = component.encode("utf-8", "surrogatepass")
    # The component counts as already-encoded only if every '%' byte belongs
    # to an escape sequence matched above.
    is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
    encoded_component = bytearray()

    for i in range(0, len(uri_bytes)):
        # Will return a single character bytestring
        byte = uri_bytes[i : i + 1]
        byte_ord = ord(byte)
        if (is_percent_encoded and byte == b"%") or (
            byte_ord < 128 and byte.decode() in allowed_chars
        ):
            encoded_component += byte
            continue
        # Escape as an uppercase, zero-padded "%XX" sequence.
        encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))

    return encoded_component.decode()
class LocationParseError(LocationValueError):
    """Raised when get_host or similar fails to parse the URL input."""

    def __init__(self, location: str) -> None:
        super().__init__(f"Failed to parse: {location}")
        # Preserve the unparsable input for callers to inspect.
        self.location = location
The provided code snippet includes necessary dependencies for implementing the `_encode_target` function. Write a Python function `def _encode_target(target: str) -> str` to solve the following problem:
Percent-encodes a request target so that there are no invalid characters. Pre-condition for this function is that 'target' must start with '/'. If that is the case then _TARGET_RE will always produce a match.
Here is the function:
def _encode_target(target: str) -> str:
    """Percent-encodes a request target so that there are no invalid characters.

    Pre-condition for this function is that 'target' must start with '/'.
    If that is the case then _TARGET_RE will always produce a match.
    """
    match = _TARGET_RE.match(target)
    if not match:  # Defensive:
        raise LocationParseError(f"{target!r} is not a valid request URI")

    path, query = match.groups()
    encoded = _encode_invalid_chars(path, _PATH_CHARS)
    if query is not None:
        # Re-attach the (separately encoded) query string.
        encoded = f"{encoded}?{_encode_invalid_chars(query, _QUERY_CHARS)}"
    return encoded
157,760 | import http.client as httplib
from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
from ..exceptions import HeaderParsingError
The provided code snippet includes necessary dependencies for implementing the `is_fp_closed` function. Write a Python function `def is_fp_closed(obj: object) -> bool` to solve the following problem:
Checks whether a given file-like object is closed. :param obj: The file-like object to check.
Here is the function:
def is_fp_closed(obj: object) -> bool:
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    :raises ValueError: if the object exposes none of the known protocols.
    """
    # Probe the known "closed?" protocols in order; an AttributeError just
    # means the object doesn't speak that protocol, so fall through.

    # ``isclosed()`` first, since some Python 3 objects don't set ``closed``
    # (GH issue #928).
    try:
        return obj.isclosed()  # type: ignore[no-any-return, attr-defined]
    except AttributeError:
        pass

    # The official file-like-object attribute.
    try:
        return obj.closed  # type: ignore[no-any-return, attr-defined]
    except AttributeError:
        pass

    # A container for another file-like object that gets released on
    # exhaustion (e.g. HTTPResponse): a missing ``fp`` means closed.
    try:
        return obj.fp is None  # type: ignore[attr-defined]
    except AttributeError:
        pass

    raise ValueError("Unable to determine whether fp is closed.")
157,761 | import http.client as httplib
from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
from ..exceptions import HeaderParsingError
class HeaderParsingError(HTTPError):
    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""

    def __init__(
        self, defects: List[MessageDefect], unparsed_data: Optional[Union[bytes, str]]
    ) -> None:
        # Summarize both the parser defects and any payload left unparsed.
        message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}"
        super().__init__(message)
The provided code snippet includes necessary dependencies for implementing the `assert_header_parsing` function. Write a Python function `def assert_header_parsing(headers: httplib.HTTPMessage) -> None` to solve the following problem:
Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param http.client.HTTPMessage headers: Headers to verify. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found.
Here is the function:
def assert_header_parsing(headers: httplib.HTTPMessage) -> None:
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param http.client.HTTPMessage headers: Headers to verify.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """
    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError(f"expected httplib.Message, got {type(headers)}.")

    # get_payload is actually email.message.Message.get_payload; the result
    # only matters for non-multipart messages.
    unparsed_data = None
    if not headers.is_multipart():
        payload = headers.get_payload()
        if isinstance(payload, (bytes, str)):
            unparsed_data = payload

    # httplib is assuming a response body is available
    # when parsing headers even when httplib only sends
    # header data to parse_headers() This results in
    # defects on multipart responses in particular.
    # See: https://github.com/urllib3/urllib3/issues/800
    # So we ignore the following defects:
    # - StartBoundaryNotFoundDefect:
    #     The claimed start boundary was never found.
    # - MultipartInvariantViolationDefect:
    #     A message claimed to be a multipart but no subparts were found.
    defects = [
        parsing_defect
        for parsing_defect in headers.defects
        if not isinstance(
            parsing_defect,
            (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect),
        )
    ]

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
157,762 | import http.client as httplib
from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
from ..exceptions import HeaderParsingError
The provided code snippet includes necessary dependencies for implementing the `is_response_to_head` function. Write a Python function `def is_response_to_head(response: httplib.HTTPResponse) -> bool` to solve the following problem:
Checks whether the request of a response has been a HEAD-request. :param http.client.HTTPResponse response: Response to check if the originating request used 'HEAD' as a method.
Here is the function:
def is_response_to_head(response: httplib.HTTPResponse) -> bool:
    """
    Checks whether the request of a response has been a HEAD-request.

    :param http.client.HTTPResponse response:
        Response to check if the originating request
        used 'HEAD' as a method.
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method  # type: str # type: ignore[attr-defined]
    # Case-insensitive comparison against "HEAD".
    return method.upper() == "HEAD"
157,763 | from base64 import b64encode
from typing import IO, Any, AnyStr, Dict, List, Optional, Union
from ..exceptions import UnrewindableBodyError
ACCEPT_ENCODING = "gzip,deflate"
The provided code snippet includes necessary dependencies for implementing the `make_headers` function. Write a Python function `def make_headers( keep_alive: Optional[bool] = None, accept_encoding: Optional[Union[bool, List[str], str]] = None, user_agent: Optional[str] = None, basic_auth: Optional[str] = None, proxy_basic_auth: Optional[str] = None, disable_cache: Optional[bool] = None, ) -> Dict[str, str]` to solve the following problem:
Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. If either the ``brotli`` or ``brotlicffi`` package is installed 'gzip,deflate,br' is used instead. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example: .. code-block:: python import urllib3 print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0")) # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} print(urllib3.util.make_headers(accept_encoding=True)) # {'accept-encoding': 'gzip,deflate'}
Here is the function:
def make_headers(
    keep_alive: Optional[bool] = None,
    accept_encoding: Optional[Union[bool, List[str], str]] = None,
    user_agent: Optional[str] = None,
    basic_auth: Optional[str] = None,
    proxy_basic_auth: Optional[str] = None,
    disable_cache: Optional[bool] = None,
) -> Dict[str, str]:
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string. ``True`` translates to
        'gzip,deflate'. If either the ``brotli`` or ``brotlicffi`` package
        is installed 'gzip,deflate,br' is used instead. A list is joined
        with commas; a string is used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for
        'proxy-authorization: basic ...' auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example:

    .. code-block:: python

        import urllib3

        print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0"))
        # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        print(urllib3.util.make_headers(accept_encoding=True))
        # {'accept-encoding': 'gzip,deflate'}
    """
    headers: Dict[str, str] = {}

    if accept_encoding:
        # A list is joined with commas; any other truthy non-string value
        # (e.g. True) selects the module default.
        if isinstance(accept_encoding, list):
            accept_encoding = ",".join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            accept_encoding = ACCEPT_ENCODING
        headers["accept-encoding"] = accept_encoding

    if user_agent:
        headers["user-agent"] = user_agent

    if keep_alive:
        headers["connection"] = "keep-alive"

    if basic_auth:
        encoded_auth = b64encode(basic_auth.encode("latin-1")).decode()
        headers["authorization"] = f"Basic {encoded_auth}"

    if proxy_basic_auth:
        encoded_auth = b64encode(proxy_basic_auth.encode("latin-1")).decode()
        headers["proxy-authorization"] = f"Basic {encoded_auth}"

    if disable_cache:
        headers["cache-control"] = "no-cache"

    return headers
157,764 | from base64 import b64encode
from typing import IO, Any, AnyStr, Dict, List, Optional, Union
from ..exceptions import UnrewindableBodyError
_FAILEDTELL = object()
def rewind_body(body: IO[AnyStr], body_pos: Optional[Union[int, object]]) -> None:
    """Seek *body* back to a previously recorded position.

    Primarily used when a request body must be replayed for a redirect
    or retry.

    :param body: file-like object that supports ``seek``.
    :param body_pos: integer offset to seek to; the ``_FAILEDTELL`` sentinel
        signals that the position could not be recorded earlier.
    :raises UnrewindableBodyError: when seeking fails or no position was
        recorded.
    :raises ValueError: when *body_pos* is neither an int nor the sentinel.
    """
    seek = getattr(body, "seek", None)
    if seek is not None and isinstance(body_pos, int):
        try:
            seek(body_pos)
        except OSError as exc:
            raise UnrewindableBodyError(
                "An error occurred when rewinding request body for redirect/retry."
            ) from exc
    elif body_pos is _FAILEDTELL:
        # tell() failed when the position was recorded, so there is nothing
        # meaningful to seek back to.
        raise UnrewindableBodyError(
            "Unable to record file position for rewinding "
            "request body during a redirect/retry."
        )
    else:
        raise ValueError(
            f"body_pos must be of type integer, instead it was {type(body_pos)}."
        )
The provided code snippet includes necessary dependencies for implementing the `set_file_position` function. Write a Python function `def set_file_position( body: Any, pos: Optional[Union[int, object]] ) -> Optional[Union[int, object]]` to solve the following problem:
If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use.
Here is the function:
def set_file_position(
    body: Any, pos: Optional[Union[int, object]]
) -> Optional[Union[int, object]]:
    """Seek *body* to *pos* when given; otherwise record the current position.

    Returns the value to use for a later rewind: the supplied *pos*, the
    result of ``body.tell()``, ``_FAILEDTELL`` when ``tell()`` raised, or
    ``None`` when the body exposes no ``tell()`` at all.
    """
    if pos is not None:
        rewind_body(body, pos)
        return pos
    if getattr(body, "tell", None) is None:
        return pos
    try:
        return body.tell()
    except OSError:
        # Distinguish "tell() failed" from "nothing recorded" so a later
        # rewind attempt can raise a meaningful error.
        return _FAILEDTELL
157,765 | import select
import socket
from functools import partial
from typing import List, Optional, Tuple
def wait_for_socket(
    sock: socket.socket,
    read: bool = False,
    write: bool = False,
    timeout: Optional[float] = None,
) -> bool:
    """Block until *sock* is ready for the requested I/O or *timeout* expires.

    Returns True when the socket became ready, False on timeout.
    """
    # We delay choosing which implementation to use until the first time we're
    # called. We could do it at import time, but then we might make the wrong
    # decision if someone goes wild with monkeypatching select.poll after
    # we're imported.
    # NOTE: this rebinds the module-global ``wait_for_socket`` name to the
    # chosen implementation, so the probe below runs at most once per process;
    # subsequent calls go straight to poll_wait_for_socket /
    # select_wait_for_socket.
    global wait_for_socket
    if _have_working_poll():
        wait_for_socket = poll_wait_for_socket
    elif hasattr(select, "select"):
        wait_for_socket = select_wait_for_socket
    return wait_for_socket(sock, read, write, timeout)
The provided code snippet includes necessary dependencies for implementing the `wait_for_write` function. Write a Python function `def wait_for_write(sock: socket.socket, timeout: Optional[float] = None) -> bool` to solve the following problem:
Waits for writing to be available on a given socket. Returns True if the socket is ready for writing, or False if the timeout expired.
Here is the function:
def wait_for_write(sock: socket.socket, timeout: Optional[float] = None) -> bool:
    """Waits for writing to be available on a given socket.
    Returns True if the socket is ready for writing, or False if the
    timeout expired.
    """
    # Delegates to the lazily-selected poll/select implementation.
    return wait_for_socket(sock, write=True, timeout=timeout)
157,766 | import hmac
import os
import socket
import sys
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
The provided code snippet includes necessary dependencies for implementing the `_is_ge_openssl_v1_1_1` function. Write a Python function `def _is_ge_openssl_v1_1_1( openssl_version_text: str, openssl_version_number: int ) -> bool` to solve the following problem:
Returns True for OpenSSL 1.1.1+ (>=0x10101000). LibreSSL reports a version number of 0x20000000 as its OpenSSL version number, so we need to filter out LibreSSL.
Here is the function:
def _is_ge_openssl_v1_1_1(
    openssl_version_text: str, openssl_version_number: int
) -> bool:
    """Return True when linked against OpenSSL 1.1.1 or newer (>=0x10101000).

    LibreSSL advertises an OpenSSL version number of 0x20000000, so the
    version text is checked first to exclude it.
    """
    if openssl_version_text.startswith("LibreSSL"):
        return False
    return openssl_version_number >= 0x10101000
157,767 | import hmac
import os
import socket
import sys
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
_TYPE_VERSION_INFO = Tuple[int, int, int, str, int]
def _is_openssl_issue_14579_fixed(
    openssl_version_text: str, openssl_version_number: int
) -> bool:
    """Return True for OpenSSL 1.1.1l+ (>=0x101010cf), where this issue is fixed.

    Before the fix, SSL_new() did not copy hostflags such as
    X509_CHECK_FLAG_NEVER_CHECK_SUBJECT, which tripped up CPython.
    https://github.com/openssl/openssl/issues/14579

    LibreSSL reports an OpenSSL version number of 0x20000000, so it is
    excluded via the version text.
    """
    if openssl_version_text.startswith("LibreSSL"):
        return False
    return openssl_version_number >= 0x101010CF
def _is_bpo_43522_fixed(
    implementation_name: str, version_info: _TYPE_VERSION_INFO
) -> bool:
    """Return True if PyPy, or CPython 3.8.9+, 3.9.3+ or 3.10+, where
    setting SSLContext.hostname_checks_common_name to False works.

    https://github.com/urllib3/urllib3/issues/2192#issuecomment-821832963
    https://foss.heptapod.net/pypy/pypy/-/issues/3539#
    """
    if implementation_name != "cpython":
        return True
    major_minor = (version_info[0], version_info[1])
    micro = version_info[2]
    if major_minor >= (3, 10):
        return True
    if major_minor == (3, 9):
        return micro >= 3
    if major_minor == (3, 8):
        return micro >= 9
    return False
def _is_has_never_check_common_name_reliable(
    openssl_version: str,
    openssl_version_number: int,
    implementation_name: str,
    version_info: _TYPE_VERSION_INFO,
) -> bool:
    """Return True when ``hostname_checks_common_name`` can be trusted.

    That is the case when either the OpenSSL side (issue #14579) or the
    CPython side (bpo-43522) of the problem is known to be fixed.
    """
    if _is_openssl_issue_14579_fixed(openssl_version, openssl_version_number):
        return True
    return _is_bpo_43522_fixed(implementation_name, version_info)
157,768 | import hmac
import os
import socket
import sys
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
class SSLError(HTTPError):
    """Raised when SSL certificate validation or the TLS handshake fails
    in an HTTPS connection. Inherits from the package-level HTTPError base."""
    pass
The provided code snippet includes necessary dependencies for implementing the `assert_fingerprint` function. Write a Python function `def assert_fingerprint(cert: Optional[bytes], fingerprint: str) -> None` to solve the following problem:
Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons.
Here is the function:
def assert_fingerprint(cert: Optional[bytes], fingerprint: str) -> None:
    """Verify that *cert* hashes to the expected *fingerprint*.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    :raises SSLError: when the certificate is missing, the fingerprint has
        an unsupported length, or the digests do not match.
    """
    if cert is None:
        raise SSLError("No certificate for the peer.")

    fingerprint = fingerprint.replace(":", "").lower()
    # The number of hex characters selects the digest (md5/sha1/sha256).
    hashfunc = HASHFUNC_MAP.get(len(fingerprint))
    if not hashfunc:
        raise SSLError(f"Fingerprint of invalid length: {fingerprint}")

    # We need encode() here for py32; works on py2 and p33.
    expected_digest = unhexlify(fingerprint.encode())
    cert_digest = hashfunc(cert).digest()

    # Constant-time comparison to avoid leaking match position via timing.
    if not hmac.compare_digest(cert_digest, expected_digest):
        raise SSLError(
            f'Fingerprints did not match. Expected "{fingerprint}", got "{cert_digest.hex()}"'
        )
157,769 | import hmac
import os
import socket
import sys
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
def ssl_wrap_socket(
    sock: socket.socket,
    keyfile: Optional[str] = ...,
    certfile: Optional[str] = ...,
    cert_reqs: Optional[int] = ...,
    ca_certs: Optional[str] = ...,
    server_hostname: Optional[str] = ...,
    ssl_version: Optional[int] = ...,
    ciphers: Optional[str] = ...,
    ssl_context: Optional["ssl.SSLContext"] = ...,
    ca_cert_dir: Optional[str] = ...,
    key_password: Optional[str] = ...,
    ca_cert_data: Union[None, str, bytes] = ...,
    tls_in_tls: "Literal[False]" = ...,
) -> "ssl.SSLSocket":
    # Typing-only signature: with tls_in_tls=False the return is narrowed to a
    # plain ssl.SSLSocket. NOTE(review): presumably decorated with
    # @typing.overload (decorator not visible in this excerpt) — the `...`
    # body is intentional and never executed.
    ...
157,770 | import hmac
import os
import socket
import sys
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
def ssl_wrap_socket(
    sock: socket.socket,
    keyfile: Optional[str] = ...,
    certfile: Optional[str] = ...,
    cert_reqs: Optional[int] = ...,
    ca_certs: Optional[str] = ...,
    server_hostname: Optional[str] = ...,
    ssl_version: Optional[int] = ...,
    ciphers: Optional[str] = ...,
    ssl_context: Optional["ssl.SSLContext"] = ...,
    ca_cert_dir: Optional[str] = ...,
    key_password: Optional[str] = ...,
    ca_cert_data: Union[None, str, bytes] = ...,
    tls_in_tls: bool = ...,
) -> Union["ssl.SSLSocket", "SSLTransportType"]:
    # Typing-only signature: with an arbitrary tls_in_tls the result may be an
    # SSLTransport (TLS-in-TLS) instead of a plain SSLSocket. NOTE(review):
    # presumably decorated with @typing.overload (decorator not visible in
    # this excerpt) — the `...` body is intentional and never executed.
    ...
157,771 | import hmac
import os
import socket
import sys
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
HAS_SNI = False
ALPN_PROTOCOLS = ["http/1.1"]
def create_urllib3_context(
    ssl_version: Optional[int] = None,
    cert_reqs: Optional[int] = None,
    options: Optional[int] = None,
    ciphers: Optional[str] = None,
    ssl_minimum_version: Optional[int] = None,
    ssl_maximum_version: Optional[int] = None,
) -> "ssl.SSLContext":
    """All arguments have the same meaning as ``ssl_wrap_socket``.
    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:
    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers
    If you wish to enable SSLv3, you can do::
        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3
    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).
    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
        This parameter is deprecated instead use 'ssl_minimum_version'.
    :param ssl_minimum_version:
        The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
    :param ssl_maximum_version:
        The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
        Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the
        default value.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
    :param ciphers:
        Which cipher suites to allow the server to select. Defaults to either system configured
        ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    if SSLContext is None:
        raise TypeError("Can't create an SSLContext object without an ssl module")
    # This means 'ssl_version' was specified as an exact value.
    if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT):
        # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version'
        # to avoid conflicts.
        if ssl_minimum_version is not None or ssl_maximum_version is not None:
            raise ValueError(
                "Can't specify both 'ssl_version' and either "
                "'ssl_minimum_version' or 'ssl_maximum_version'"
            )
        # 'ssl_version' is deprecated and will be removed in the future.
        else:
            # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead.
            # Values not in the mapping fall back to the widest min/max range.
            ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get(
                ssl_version, TLSVersion.MINIMUM_SUPPORTED
            )
            ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get(
                ssl_version, TLSVersion.MAXIMUM_SUPPORTED
            )
            # This warning message is pushing users to use 'ssl_minimum_version'
            # instead of both min/max. Best practice is to only set the minimum version and
            # keep the maximum version to be it's default value: 'TLSVersion.MAXIMUM_SUPPORTED'
            warnings.warn(
                "'ssl_version' option is deprecated and will be "
                "removed in a future release of urllib3 2.x. Instead "
                "use 'ssl_minimum_version'",
                category=DeprecationWarning,
                stacklevel=2,
            )
    # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT
    context = SSLContext(PROTOCOL_TLS_CLIENT)
    if ssl_minimum_version is not None:
        context.minimum_version = ssl_minimum_version
    else:  # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here
        context.minimum_version = TLSVersion.TLSv1_2
    if ssl_maximum_version is not None:
        context.maximum_version = ssl_maximum_version
    # Unless we're given ciphers defer to either system ciphers in
    # the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
    if ciphers is not None or not USE_DEFAULT_SSLCONTEXT_CIPHERS:
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)
    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue #309)
        options |= OP_NO_COMPRESSION
        # TLSv1.2 only. Unless set explicitly, do not request tickets.
        # This may save some bandwidth on wire, and although the ticket is encrypted,
        # there is a risk associated with it being on wire,
        # if the server is not rotating its ticketing keys properly.
        options |= OP_NO_TICKET
    context.options |= options
    # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
    # necessary for conditional client cert authentication with TLS 1.3.
    # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
    # versions of Python. We only enable on Python 3.7.4+ or if certificate
    # verification is enabled to work around Python issue #37428
    # See: https://bugs.python.org/issue37428
    if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
        context, "post_handshake_auth", None
    ) is not None:
        context.post_handshake_auth = True
    # The order of the below lines setting verify_mode and check_hostname
    # matter due to safe-guards SSLContext has to prevent an SSLContext with
    # check_hostname=True, verify_mode=NONE/OPTIONAL.
    if cert_reqs == ssl.CERT_REQUIRED:
        context.verify_mode = cert_reqs
        context.check_hostname = True
    else:
        context.check_hostname = False
        context.verify_mode = cert_reqs
    if HAS_NEVER_CHECK_COMMON_NAME:
        context.hostname_checks_common_name = False
    # Enable logging of TLS session keys via defacto standard environment variable
    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
    if hasattr(context, "keylog_filename"):
        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
        if sslkeylogfile:
            context.keylog_filename = sslkeylogfile
    return context
def is_ipaddress(hostname: Union[str, bytes]) -> bool:
    """Detect whether *hostname* is an IPv4 or IPv6 address literal.

    IPv6 addresses with Zone IDs are detected as well.

    :param str hostname: Hostname to examine.
    :return: True if the hostname is an IP address, False otherwise.
    """
    if isinstance(hostname, bytes):
        # IDN A-label bytes are ASCII compatible.
        hostname = hostname.decode("ascii")
    if _IPV4_RE.match(hostname):
        return True
    return _BRACELESS_IPV6_ADDRZ_RE.match(hostname) is not None
def _is_key_file_encrypted(key_file: str) -> bool:
    """Return True when *key_file* looks like an encrypted private key.

    An encrypted traditional-format PEM key carries a line containing
    ``ENCRYPTED`` (e.g. ``Proc-Type: 4,ENCRYPTED``).
    """
    with open(key_file) as pem:
        return any("ENCRYPTED" in line for line in pem)
def _ssl_wrap_socket_impl(
    sock: socket.socket,
    ssl_context: "ssl.SSLContext",
    tls_in_tls: bool,
    server_hostname: Optional[str] = None,
) -> Union["ssl.SSLSocket", "SSLTransportType"]:
    """Wrap *sock* with *ssl_context*, using SSLTransport for TLS-in-TLS."""
    if not tls_in_tls:
        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)

    if not SSLTransport:
        # Import error, ssl is not available.
        raise ProxySchemeUnsupported(
            "TLS in TLS requires support for the 'ssl' module"
        )

    SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
    return SSLTransport(sock, ssl_context, server_hostname)
class SSLError(HTTPError):
    """Raised when SSL certificate validation or the TLS handshake fails
    in an HTTPS connection. Inherits from the package-level HTTPError base."""
    pass
class SNIMissingWarning(HTTPWarning):
    """Warned when making a HTTPS request without SNI available.

    Emitted by ``ssl_wrap_socket`` when SNI (Server Name Indication) is not
    supported, since the server may then present the wrong certificate."""
    pass
The provided code snippet includes necessary dependencies for implementing the `ssl_wrap_socket` function. Write a Python function `def ssl_wrap_socket( sock: socket.socket, keyfile: Optional[str] = None, certfile: Optional[str] = None, cert_reqs: Optional[int] = None, ca_certs: Optional[str] = None, server_hostname: Optional[str] = None, ssl_version: Optional[int] = None, ciphers: Optional[str] = None, ssl_context: Optional["ssl.SSLContext"] = None, ca_cert_dir: Optional[str] = None, key_password: Optional[str] = None, ca_cert_data: Union[None, str, bytes] = None, tls_in_tls: bool = False, ) -> Union["ssl.SSLSocket", "SSLTransportType"]` to solve the following problem:
All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. :param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). :param key_password: Optional password if the keyfile is encrypted. :param ca_cert_data: Optional string containing CA certificates in PEM format suitable for passing as the cadata parameter to SSLContext.load_verify_locations() :param tls_in_tls: Use SSLTransport to wrap the existing socket.
Here is the function:
def ssl_wrap_socket(
    sock: socket.socket,
    keyfile: Optional[str] = None,
    certfile: Optional[str] = None,
    cert_reqs: Optional[int] = None,
    ca_certs: Optional[str] = None,
    server_hostname: Optional[str] = None,
    ssl_version: Optional[int] = None,
    ciphers: Optional[str] = None,
    ssl_context: Optional["ssl.SSLContext"] = None,
    ca_cert_dir: Optional[str] = None,
    key_password: Optional[str] = None,
    ca_cert_data: Union[None, str, bytes] = None,
    tls_in_tls: bool = False,
) -> Union["ssl.SSLSocket", "SSLTransportType"]:
    """
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.
    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    :param key_password:
        Optional password if the keyfile is encrypted.
    :param ca_cert_data:
        Optional string containing CA certificates in PEM format suitable for
        passing as the cadata parameter to SSLContext.load_verify_locations()
    :param tls_in_tls:
        Use SSLTransport to wrap the existing socket.
    :raises SSLError: when CA material cannot be loaded or the client key is
        encrypted but no password was supplied.
    """
    context = ssl_context
    if context is None:
        # Note: This branch of code and all the variables in it are only used in tests.
        # We should consider deprecating and removing this code.
        context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
    # Verification material: explicit CA files/dirs/data win; otherwise fall
    # back to the OS trust store (only when the caller didn't pass a context).
    if ca_certs or ca_cert_dir or ca_cert_data:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
        except OSError as e:
            raise SSLError(e) from e
    elif ssl_context is None and hasattr(context, "load_default_certs"):
        # try to load OS default certs; works well on Windows.
        context.load_default_certs()
    # Attempt to detect if we get the goofy behavior of the
    # keyfile being encrypted and OpenSSL asking for the
    # passphrase via the terminal and instead error out.
    if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
        raise SSLError("Client private key is encrypted, password is required")
    # Client certificate chain, unlocking the key when a password was given.
    if certfile:
        if key_password is None:
            context.load_cert_chain(certfile, keyfile)
        else:
            context.load_cert_chain(certfile, keyfile, key_password)
    # Advertise HTTP/1.1 via ALPN where the ssl implementation supports it.
    try:
        if hasattr(context, "set_alpn_protocols"):
            context.set_alpn_protocols(ALPN_PROTOCOLS)
    except NotImplementedError:  # Defensive: in CI, we always have set_alpn_protocols
        pass
    if not HAS_SNI and server_hostname and not is_ipaddress(server_hostname):
        warnings.warn(
            "An HTTPS request has been made, but the SNI (Server Name "
            "Indication) extension to TLS is not available on this platform. "
            "This may cause the server to present an incorrect TLS "
            "certificate, which can cause validation failures. You can upgrade to "
            "a newer version of Python to solve this. For more information, see "
            "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
            "#tls-warnings",
            SNIMissingWarning,
        )
    ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
    return ssl_sock
157,772 | from typing import TYPE_CHECKING, Optional, Union
from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
from .url import Url
class Url(
    NamedTuple(
        "Url",
        [
            ("scheme", Optional[str]),
            ("auth", Optional[str]),
            ("host", Optional[str]),
            ("port", Optional[int]),
            ("path", Optional[str]),
            ("query", Optional[str]),
            ("fragment", Optional[str]),
        ],
    )
):
    """
    Data structure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`. Both the scheme and host are normalized as they are
    both case-insensitive according to RFC 3986.

    The read-only accessors (``hostname``, ``request_uri``, ``netloc``,
    ``url``) are properties: ``__str__`` (and callers) read them as plain
    attributes, so the ``@property`` decorators below are required.
    """

    def __new__(  # type: ignore[no-untyped-def]
        cls,
        scheme: Optional[str] = None,
        auth: Optional[str] = None,
        host: Optional[str] = None,
        port: Optional[int] = None,
        path: Optional[str] = None,
        query: Optional[str] = None,
        fragment: Optional[str] = None,
    ):
        # Normalize on construction: paths are always absolute and schemes
        # are lower-cased (scheme is case-insensitive per RFC 3986).
        if path and not path.startswith("/"):
            path = "/" + path
        if scheme is not None:
            scheme = scheme.lower()
        return super().__new__(cls, scheme, auth, host, port, path, query, fragment)

    @property
    def hostname(self) -> Optional[str]:
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host

    @property
    def request_uri(self) -> str:
        """Absolute path including the query string."""
        uri = self.path or "/"
        if self.query is not None:
            uri += "?" + self.query
        return uri

    @property
    def netloc(self) -> Optional[str]:
        """Network location including host and port."""
        if self.host is None:
            return None
        if self.port:
            return f"{self.host}:{self.port}"
        return self.host

    @property
    def url(self) -> str:
        """
        Convert self into a url
        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).
        Example:
        .. code-block:: python
            import urllib3
            U = urllib3.util.parse_url("https://google.com/mail/")
            print(U.url)
            # "https://google.com/mail/"
            print( urllib3.util.Url("https", "username:password",
                "host.com", 80, "/path", "query", "fragment"
                ).url
            )
            # "https://username:password@host.com:80/path?query#fragment"
        """
        scheme, auth, host, port, path, query, fragment = self
        url = ""
        # We use "is not None" we want things to happen with empty strings (or 0 port)
        if scheme is not None:
            url += scheme + "://"
        if auth is not None:
            url += auth + "@"
        if host is not None:
            url += host
        if port is not None:
            url += ":" + str(port)
        if path is not None:
            url += path
        if query is not None:
            url += "?" + query
        if fragment is not None:
            url += "#" + fragment
        return url

    def __str__(self) -> str:
        return self.url
The provided code snippet includes necessary dependencies for implementing the `connection_requires_http_tunnel` function. Write a Python function `def connection_requires_http_tunnel( proxy_url: Optional[Url] = None, proxy_config: "Optional[ProxyConfig]" = None, destination_scheme: Optional[str] = None, ) -> bool` to solve the following problem:
Returns True if the connection requires an HTTP CONNECT through the proxy. :param URL proxy_url: URL of the proxy. :param ProxyConfig proxy_config: Proxy configuration from poolmanager.py :param str destination_scheme: The scheme of the destination. (i.e https, http, etc)
Here is the function:
def connection_requires_http_tunnel(
    proxy_url: "Optional[Url]" = None,
    proxy_config: "Optional[ProxyConfig]" = None,
    destination_scheme: Optional[str] = None,
) -> bool:
    """
    Returns True if the connection requires an HTTP CONNECT through the proxy.
    :param URL proxy_url:
        URL of the proxy.
    :param ProxyConfig proxy_config:
        Proxy configuration from poolmanager.py
    :param str destination_scheme:
        The scheme of the destination. (i.e https, http, etc)
    """
    # No proxy at all: nothing to tunnel through.
    if proxy_url is None:
        return False

    # Plain-HTTP destinations are always forwarded, never tunneled.
    if destination_scheme == "http":
        return False

    # An HTTPS proxy may be configured to forward HTTPS traffic directly.
    forwarding_https = bool(
        proxy_url.scheme == "https"
        and proxy_config
        and proxy_config.use_forwarding_for_https
    )
    # Everything else goes through a CONNECT tunnel.
    return not forwarding_https
157,773 | from typing import TYPE_CHECKING, Optional, Union
from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
from .url import Url
def resolve_cert_reqs(candidate: Union[None, int, str]) -> int:
    """Map *candidate* to a numeric ``ssl`` certificate-requirement constant
    suitable for passing to ``wrap_socket``.

    Defaults to :data:`ssl.CERT_REQUIRED` when *candidate* is None.  A string
    is looked up in the :mod:`ssl` module, first verbatim and then with a
    ``CERT_`` prefix (so ``"REQUIRED"`` works as well as ``"CERT_REQUIRED"``).
    Any other value is assumed to already be a usable numeric constant.
    """
    if candidate is None:
        return CERT_REQUIRED
    if not isinstance(candidate, str):
        return candidate
    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        resolved = getattr(ssl, "CERT_" + candidate)
    return cast(int, resolved)
def resolve_ssl_version(candidate: Union[None, int, str]) -> int:
    """Map *candidate* to a numeric ``ssl`` protocol constant.

    Works like :func:`resolve_cert_reqs`: None yields ``PROTOCOL_TLS``,
    strings are resolved against the :mod:`ssl` module (with an optional
    ``PROTOCOL_`` prefix), and anything else is passed through unchanged.
    """
    if candidate is None:
        return PROTOCOL_TLS
    if not isinstance(candidate, str):
        return candidate
    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        resolved = getattr(ssl, "PROTOCOL_" + candidate)
    return cast(int, resolved)
def create_urllib3_context(
    ssl_version: Optional[int] = None,
    cert_reqs: Optional[int] = None,
    options: Optional[int] = None,
    ciphers: Optional[str] = None,
    ssl_minimum_version: Optional[int] = None,
    ssl_maximum_version: Optional[int] = None,
) -> "ssl.SSLContext":
    """All arguments have the same meaning as ``ssl_wrap_socket``.
    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:
    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers
    If you wish to enable SSLv3, you can do::
        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3
    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).
    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
        This parameter is deprecated instead use 'ssl_minimum_version'.
    :param ssl_minimum_version:
        The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
    :param ssl_maximum_version:
        The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
        Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the
        default value.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
    :param ciphers:
        Which cipher suites to allow the server to select. Defaults to either system configured
        ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    # SSLContext is None when the 'ssl' module was unavailable at import time.
    if SSLContext is None:
        raise TypeError("Can't create an SSLContext object without an ssl module")
    # This means 'ssl_version' was specified as an exact value.
    if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT):
        # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version'
        # to avoid conflicts.
        if ssl_minimum_version is not None or ssl_maximum_version is not None:
            raise ValueError(
                "Can't specify both 'ssl_version' and either "
                "'ssl_minimum_version' or 'ssl_maximum_version'"
            )
        # 'ssl_version' is deprecated and will be removed in the future.
        else:
            # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead.
            ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get(
                ssl_version, TLSVersion.MINIMUM_SUPPORTED
            )
            ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get(
                ssl_version, TLSVersion.MAXIMUM_SUPPORTED
            )
        # This warning message is pushing users to use 'ssl_minimum_version'
        # instead of both min/max. Best practice is to only set the minimum version and
        # keep the maximum version to be it's default value: 'TLSVersion.MAXIMUM_SUPPORTED'
        warnings.warn(
            "'ssl_version' option is deprecated and will be "
            "removed in a future release of urllib3 2.x. Instead "
            "use 'ssl_minimum_version'",
            category=DeprecationWarning,
            stacklevel=2,
        )
    # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT
    context = SSLContext(PROTOCOL_TLS_CLIENT)
    if ssl_minimum_version is not None:
        context.minimum_version = ssl_minimum_version
    else:  # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here
        context.minimum_version = TLSVersion.TLSv1_2
    if ssl_maximum_version is not None:
        context.maximum_version = ssl_maximum_version
    # Unless we're given ciphers defer to either system ciphers in
    # the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
    if ciphers is not None or not USE_DEFAULT_SSLCONTEXT_CIPHERS:
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)
    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue #309)
        options |= OP_NO_COMPRESSION
        # TLSv1.2 only. Unless set explicitly, do not request tickets.
        # This may save some bandwidth on wire, and although the ticket is encrypted,
        # there is a risk associated with it being on wire,
        # if the server is not rotating its ticketing keys properly.
        options |= OP_NO_TICKET
    context.options |= options
    # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
    # necessary for conditional client cert authentication with TLS 1.3.
    # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
    # versions of Python. We only enable on Python 3.7.4+ or if certificate
    # verification is enabled to work around Python issue #37428
    # See: https://bugs.python.org/issue37428
    if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
        context, "post_handshake_auth", None
    ) is not None:
        context.post_handshake_auth = True
    # The order of the below lines setting verify_mode and check_hostname
    # matter due to safe-guards SSLContext has to prevent an SSLContext with
    # check_hostname=True, verify_mode=NONE/OPTIONAL.
    if cert_reqs == ssl.CERT_REQUIRED:
        context.verify_mode = cert_reqs
        context.check_hostname = True
    else:
        context.check_hostname = False
        context.verify_mode = cert_reqs
    if HAS_NEVER_CHECK_COMMON_NAME:
        context.hostname_checks_common_name = False
    # Enable logging of TLS session keys via defacto standard environment variable
    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
    if hasattr(context, "keylog_filename"):
        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
        if sslkeylogfile:
            context.keylog_filename = sslkeylogfile
    return context
The provided code snippet includes necessary dependencies for implementing the `create_proxy_ssl_context` function. Write a Python function `def create_proxy_ssl_context( ssl_version: Optional[Union[int, str]] = None, cert_reqs: Optional[Union[int, str]] = None, ca_certs: Optional[str] = None, ca_cert_dir: Optional[str] = None, ca_cert_data: Union[None, str, bytes] = None, ) -> "ssl.SSLContext"` to solve the following problem:
Generates a default proxy ssl context if one hasn't been provided by the user.
Here is the function:
def create_proxy_ssl_context(
    ssl_version: Optional[Union[int, str]] = None,
    cert_reqs: Optional[Union[int, str]] = None,
    ca_certs: Optional[str] = None,
    ca_cert_dir: Optional[str] = None,
    ca_cert_data: Union[None, str, bytes] = None,
) -> "ssl.SSLContext":
    """Build a default SSL context for speaking TLS to an HTTPS proxy.

    Used when the caller has not supplied a proxy context of their own.
    """
    context = create_urllib3_context(
        ssl_version=resolve_ssl_version(ssl_version),
        cert_reqs=resolve_cert_reqs(cert_reqs),
    )
    no_custom_cas = not (ca_certs or ca_cert_dir or ca_cert_data)
    if no_custom_cas and hasattr(context, "load_default_certs"):
        # No CA material was supplied, so fall back to the system trust store.
        context.load_default_certs()
    return context
157,774 | import socket
from typing import Optional, Sequence, Tuple, Union
from ..exceptions import LocationParseError
from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
from .wait import wait_for_read
def wait_for_read(sock: socket.socket, timeout: Optional[float] = None) -> bool:
    """Wait until *sock* is readable or *timeout* elapses.

    Returns True if the socket is readable, or False if the timeout expired.
    """
    readable = wait_for_socket(sock, read=True, timeout=timeout)
    return readable
The provided code snippet includes necessary dependencies for implementing the `is_connection_dropped` function. Write a Python function `def is_connection_dropped(conn: socket.socket) -> bool` to solve the following problem:
Returns True if the connection is dropped and should be closed. :param conn: :class:`http.client.HTTPConnection` object.
Here is the function:
def is_connection_dropped(conn: socket.socket) -> bool:  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.
    :param conn:
        :class:`http.client.HTTPConnection` object.
    """
    sock = getattr(conn, "sock", None)
    if sock is None:
        # httplib (or similar) already tore the connection down.
        return True
    # An idle keep-alive socket should have nothing pending; if it polls
    # readable with a zero timeout, the peer sent EOF or unexpected data,
    # i.e. the connection has been dropped.
    return wait_for_read(sock, timeout=0.0)
157,775 | import socket
from typing import Optional, Sequence, Tuple, Union
from ..exceptions import LocationParseError
from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
from .wait import wait_for_read
_TYPE_SOCKET_OPTIONS = Sequence[Tuple[int, int, Union[int, bytes]]]
def _set_socket_options(
sock: socket.socket, options: Optional[_TYPE_SOCKET_OPTIONS]
) -> None:
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
def allowed_gai_family() -> socket.AddressFamily:
    """Choose the address family to pass to getaddrinfo.

    Returns AF_UNSPEC (search both IPv4 and IPv6 DNS records) when the
    platform has working IPv6, otherwise restricts lookups to AF_INET.
    """
    if HAS_IPV6:
        return socket.AF_UNSPEC
    return socket.AF_INET
class LocationParseError(LocationValueError):
    """Raised when get_host or similar fails to parse the URL input."""

    def __init__(self, location: str) -> None:
        super().__init__("Failed to parse: " + location)
        # Keep the offending input around for callers to inspect.
        self.location = location
_DEFAULT_TIMEOUT: "Final[_TYPE_DEFAULT]" = _TYPE_DEFAULT.token
_TYPE_TIMEOUT = Optional[Union[float, _TYPE_DEFAULT]]
The provided code snippet includes necessary dependencies for implementing the `create_connection` function. Write a Python function `def create_connection( address: Tuple[str, int], timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: Optional[Tuple[str, int]] = None, socket_options: Optional[_TYPE_SOCKET_OPTIONS] = None, ) -> socket.socket` to solve the following problem:
Connect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the global default timeout setting returned by :func:`socket.getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. A host of '' or port 0 tells the OS to use the default.
Here is the function:
def create_connection(
    address: Tuple[str, int],
    timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
    source_address: Optional[Tuple[str, int]] = None,
    socket_options: Optional[_TYPE_SOCKET_OPTIONS] = None,
) -> socket.socket:
    """Connect to *address* and return the socket object.
    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`socket.getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
    """
    host, port = address
    if host.startswith("["):
        # Strip the brackets from IPv6 literals like "[::1]".
        host = host.strip("[]")
    err = None
    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()
    try:
        # Reject hostnames that cannot be IDNA-encoded before any DNS work.
        host.encode("idna")
    except UnicodeError:
        raise LocationParseError(f"'{host}', label empty or too long") from None
    # Try each resolved address in turn; keep the last error for re-raising
    # if none of them accepts a connection.
    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)
            if timeout is not _DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            # Break explicitly a reference cycle
            err = None
            return sock
        except OSError as _:
            err = _
            if sock is not None:
                sock.close()
    if err is not None:
        try:
            raise err
        finally:
            # Break explicitly a reference cycle
            err = None
    else:
        raise OSError("getaddrinfo returns an empty list")
157,776 | import socket
from typing import Optional, Sequence, Tuple, Union
from ..exceptions import LocationParseError
from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
from .wait import wait_for_read
The provided code snippet includes necessary dependencies for implementing the `_has_ipv6` function. Write a Python function `def _has_ipv6(host: str) -> bool` to solve the following problem:
Returns True if the system can bind an IPv6 address.
Here is the function:
def _has_ipv6(host: str) -> bool:
"""Returns True if the system can bind an IPv6 address."""
sock = None
has_ipv6 = False
if socket.has_ipv6:
# has_ipv6 returns true if cPython was compiled with IPv6 support.
# It does not tell us if the system has IPv6 support enabled. To
# determine that we must bind to an IPv6 address.
# https://github.com/urllib3/urllib3/pull/611
# https://bugs.python.org/issue658327
try:
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
has_ipv6 = True
except Exception:
pass
if sock:
sock.close()
return has_ipv6 | Returns True if the system can bind an IPv6 address. |
157,777 | import sys
from typing import Any, Dict
import tornado.httpserver
import tornado.ioloop
import tornado.web
from dummyserver.proxy import ProxyHandler
from dummyserver.server import DEFAULT_CERTS, ssl_options_to_context
class ProxyHandler(tornado.web.RequestHandler):
    """Minimal tornado-based HTTP proxy used by the test suite.

    GET/POST requests are re-issued upstream with AsyncHTTPClient and the
    response relayed back; CONNECT splices raw bytes bidirectionally
    between the client and the upstream host.
    """

    SUPPORTED_METHODS = ["GET", "POST", "CONNECT"]  # type: ignore[assignment]

    async def get(self) -> None:
        """Forward the incoming request upstream and relay the response."""

        async def handle_response(response: tornado.httpclient.HTTPResponse) -> None:
            # Non-HTTP failures (e.g. connection errors) become a 500 here;
            # HTTP-level errors are relayed with their original status below.
            if response.error and not isinstance(
                response.error, tornado.httpclient.HTTPError
            ):
                self.set_status(500)
                self.write("Internal server error:\n" + str(response.error))
                await self.finish()
            else:
                self.set_status(response.code)
                # Only a small allow-list of headers is copied through.
                for header in (
                    "Date",
                    "Cache-Control",
                    "Server",
                    "Content-Type",
                    "Location",
                ):
                    v = response.headers.get(header)
                    if v:
                        self.set_header(header, v)
                if response.body:
                    self.write(response.body)
                await self.finish()

        # Verify the upstream server against the CA bundle configured in the
        # application settings, when one was provided.
        upstream_ca_certs = self.application.settings.get("upstream_ca_certs", None)
        ssl_options = None
        if upstream_ca_certs:
            ssl_options = ssl.create_default_context(cafile=upstream_ca_certs)
        assert self.request.uri is not None
        assert self.request.method is not None
        req = tornado.httpclient.HTTPRequest(
            url=self.request.uri,
            method=self.request.method,
            body=self.request.body,
            headers=self.request.headers,
            follow_redirects=False,
            allow_nonstandard_methods=True,
            ssl_options=ssl_options,
        )
        client = tornado.httpclient.AsyncHTTPClient()
        try:
            response = await client.fetch(req)
            await handle_response(response)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, "response") and e.response:
                await handle_response(e.response)
            else:
                self.set_status(500)
                self.write("Internal server error:\n" + str(e))
                # NOTE(review): finish() is not awaited here, unlike the other
                # finish() calls above -- confirm this is intentional.
                self.finish()

    async def post(self) -> None:
        """POST is proxied exactly like GET (the body comes from self.request)."""
        await self.get()

    async def connect(self) -> None:
        """Handle CONNECT by tunnelling bytes between client and upstream."""
        assert self.request.uri is not None
        host, port = self.request.uri.split(":")
        assert self.request.connection is not None
        client: tornado.iostream.IOStream = self.request.connection.stream  # type: ignore[attr-defined]

        async def start_forward(
            reader: tornado.iostream.IOStream, writer: tornado.iostream.IOStream
        ) -> None:
            # Copy bytes in one direction until the reading side closes.
            while True:
                try:
                    data = await reader.read_bytes(4096, partial=True)
                except tornado.iostream.StreamClosedError:
                    break
                if not data:
                    break
                writer.write(data)
            writer.close()

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        upstream = tornado.iostream.IOStream(s)
        await upstream.connect((host, int(port)))
        client.write(b"HTTP/1.0 200 Connection established\r\n\r\n")
        # Pump both directions concurrently until both streams close.
        fu1 = start_forward(client, upstream)
        fu2 = start_forward(upstream, client)
        await tornado.gen.multi([fu1, fu2])
# Default TLS material for the dummy test server; CERTS_PATH and
# ALPN_PROTOCOLS are defined elsewhere in this module.
DEFAULT_CERTS: Dict[str, Any] = {
    # Server certificate/key pair.
    "certfile": os.path.join(CERTS_PATH, "server.crt"),
    "keyfile": os.path.join(CERTS_PATH, "server.key"),
    # Client certificates are verified when presented, but not required.
    "cert_reqs": ssl.CERT_OPTIONAL,
    "ca_certs": os.path.join(CERTS_PATH, "cacert.pem"),
    "alpn_protocols": ALPN_PROTOCOLS,
}
def ssl_options_to_context(  # type: ignore[no-untyped-def]
    keyfile=None,
    certfile=None,
    server_side=None,
    cert_reqs=None,
    ssl_version: Optional[Union[str, int]] = None,
    ca_certs=None,
    do_handshake_on_connect=None,
    suppress_ragged_eofs=None,
    ciphers=None,
    alpn_protocols=None,
) -> ssl.SSLContext:
    """Build an SSLContext equivalent to the given ssl.wrap_socket arguments.

    Only keyfile/certfile/cert_reqs/ssl_version/ca_certs/alpn_protocols are
    consulted; the remaining parameters exist for wrap_socket signature
    compatibility and are ignored here.
    """
    protocol = resolve_ssl_version(ssl_version)
    cert_none = resolve_cert_reqs("CERT_NONE")
    verify_mode = cert_none if cert_reqs is None else resolve_cert_reqs(cert_reqs)

    ctx = ssl.SSLContext(protocol)
    ctx.load_cert_chain(certfile, keyfile)
    ctx.verify_mode = verify_mode
    if verify_mode != cert_none:
        # Only load a CA bundle when peer verification is actually enabled.
        ctx.load_verify_locations(cafile=ca_certs)
    if alpn_protocols and hasattr(ctx, "set_alpn_protocols"):
        try:
            ctx.set_alpn_protocols(alpn_protocols)
        except NotImplementedError:
            # ALPN unsupported by this TLS backend; proceed without it.
            pass
    return ctx
The provided code snippet includes necessary dependencies for implementing the `run_proxy` function. Write a Python function `def run_proxy(port: int, certs: Dict[str, Any] = DEFAULT_CERTS) -> None` to solve the following problem:
Run proxy on the specified port using the provided certs. Example usage: python -m dummyserver.https_proxy You'll need to ensure you have access to certain packages such as trustme, tornado, urllib3.
Here is the function:
def run_proxy(port: int, certs: Dict[str, Any] = DEFAULT_CERTS) -> None:
    """
    Run proxy on the specified port using the provided certs.
    Example usage:
    python -m dummyserver.https_proxy
    You'll need to ensure you have access to certain packages such as trustme,
    tornado, urllib3.
    """
    # ProxyHandler reads 'upstream_ca_certs' from the application settings to
    # verify the upstream server's TLS certificate.
    upstream_ca_certs = certs.get("ca_certs")
    app = tornado.web.Application(
        [(r".*", ProxyHandler)], upstream_ca_certs=upstream_ca_certs
    )
    ssl_opts = ssl_options_to_context(**certs)
    http_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_opts)
    http_server.listen(port)
    ioloop = tornado.ioloop.IOLoop.instance()
    try:
        # Blocks until interrupted.
        ioloop.start()
    except KeyboardInterrupt:
        ioloop.stop()
157,778 | import logging
import os
import socket
import ssl
import sys
import threading
import warnings
from datetime import datetime
from typing import Any, Callable, Dict, Optional, Tuple, Union
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
import trustme
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from urllib3.exceptions import HTTPWarning
from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
def _resolves_to_ipv6(host: str) -> bool:
"""Returns True if the system resolves host to an IPv6 address by default."""
resolves_to_ipv6 = False
try:
for res in socket.getaddrinfo(host, None, socket.AF_UNSPEC):
af, _, _, _, _ = res
if af == socket.AF_INET6:
resolves_to_ipv6 = True
except socket.gaierror:
pass
return resolves_to_ipv6
The provided code snippet includes necessary dependencies for implementing the `_has_ipv6` function. Write a Python function `def _has_ipv6(host: str) -> bool` to solve the following problem:
Returns True if the system can bind an IPv6 address.
Here is the function:
def _has_ipv6(host: str) -> bool:
    """Returns True if the system can bind an IPv6 address."""
    if not socket.has_ipv6:
        # cPython was compiled without IPv6 support; nothing more to check.
        return False
    # socket.has_ipv6 only reflects compile-time support. To find out whether
    # the running system actually has IPv6 enabled we must bind a socket.
    # https://github.com/urllib3/urllib3/pull/611
    # https://bugs.python.org/issue658327
    sock = None
    try:
        sock = socket.socket(socket.AF_INET6)
        sock.bind((host, 0))
        # Binding worked; also require that name resolution prefers IPv6.
        return _resolves_to_ipv6("localhost")
    except Exception:
        return False
    finally:
        if sock:
            sock.close()
157,779 | import logging
import os
import socket
import ssl
import sys
import threading
import warnings
from datetime import datetime
from typing import Any, Callable, Dict, Optional, Tuple, Union
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
import trustme
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from urllib3.exceptions import HTTPWarning
from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
def ssl_options_to_context(  # type: ignore[no-untyped-def]
    keyfile=None,
    certfile=None,
    server_side=None,
    cert_reqs=None,
    ssl_version: Optional[Union[str, int]] = None,
    ca_certs=None,
    do_handshake_on_connect=None,
    suppress_ragged_eofs=None,
    ciphers=None,
    alpn_protocols=None,
) -> ssl.SSLContext:
    """Return an equivalent SSLContext based on ssl.wrap_socket args.

    Only keyfile/certfile/cert_reqs/ssl_version/ca_certs/alpn_protocols are
    consulted; the remaining parameters exist for wrap_socket signature
    compatibility and are ignored here.
    """
    ssl_version = resolve_ssl_version(ssl_version)
    cert_none = resolve_cert_reqs("CERT_NONE")
    if cert_reqs is None:
        cert_reqs = cert_none
    else:
        cert_reqs = resolve_cert_reqs(cert_reqs)
    ctx = ssl.SSLContext(ssl_version)
    ctx.load_cert_chain(certfile, keyfile)
    ctx.verify_mode = cert_reqs
    if ctx.verify_mode != cert_none:
        # Only load a CA bundle when peer verification is actually enabled.
        ctx.load_verify_locations(cafile=ca_certs)
    if alpn_protocols and hasattr(ctx, "set_alpn_protocols"):
        try:
            ctx.set_alpn_protocols(alpn_protocols)
        except NotImplementedError:
            # ALPN unsupported by this TLS backend; proceed without it.
            pass
    return ctx
def run_tornado_app(  # type: ignore[no-untyped-def]
    app: tornado.web.Application,
    io_loop: tornado.ioloop.IOLoop,
    certs,
    scheme: str,
    host: str,
) -> Tuple[tornado.httpserver.HTTPServer, int]:
    """Bind *app* to an OS-chosen port on *host* and return (server, port).

    *certs* is an ssl.wrap_socket-style dict (see DEFAULT_CERTS) and is only
    used when *scheme* is "https".
    """
    assert io_loop == tornado.ioloop.IOLoop.current()
    # We can't use fromtimestamp(0) because of CPython issue 29097, so we'll
    # just construct the datetime object directly.
    app.last_req = datetime(1970, 1, 1)  # type: ignore[attr-defined]
    if scheme == "https":
        ssl_opts = ssl_options_to_context(**certs)
        http_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_opts)
    else:
        http_server = tornado.httpserver.HTTPServer(app)
    # Port None lets the OS pick a free port; report it back to the caller.
    sockets = tornado.netutil.bind_sockets(None, address=host)  # type: ignore[arg-type]
    port = sockets[0].getsockname()[1]
    http_server.add_sockets(sockets)
    return http_server, port
157,780 | import logging
import os
import socket
import ssl
import sys
import threading
import warnings
from datetime import datetime
from typing import Any, Callable, Dict, Optional, Tuple, Union
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
import trustme
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from urllib3.exceptions import HTTPWarning
from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
def run_loop_in_thread(io_loop: tornado.ioloop.IOLoop) -> threading.Thread:
    """Start *io_loop* on a freshly spawned background thread and return it."""
    thread = threading.Thread(target=io_loop.start)
    thread.start()
    return thread
157,781 | import logging
import os
import socket
import ssl
import sys
import threading
import warnings
from datetime import datetime
from typing import Any, Callable, Dict, Optional, Tuple, Union
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
import trustme
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from urllib3.exceptions import HTTPWarning
from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
def get_unreachable_address() -> Tuple[str, int]:
    """Return a (host, port) pair that is guaranteed never to resolve.

    The ``.invalid`` TLD is reserved per RFC 2606, so connecting always fails.
    """
    host = "something.invalid"
    port = 54321
    return (host, port)
157,782 | import logging
import os
import socket
import ssl
import sys
import threading
import warnings
from datetime import datetime
from typing import Any, Callable, Dict, Optional, Tuple, Union
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
import trustme
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from urllib3.exceptions import HTTPWarning
from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
def encrypt_key_pem(private_key_pem: trustme.Blob, password: bytes) -> trustme.Blob:
    """Re-serialize an unencrypted PEM private key, encrypting it with *password*.

    :param private_key_pem: unencrypted PEM key material as a trustme Blob.
    :param password: passphrase used for the best-available PEM encryption.
    :returns: a new Blob holding the encrypted traditional-OpenSSL PEM key.
    """
    private_key = serialization.load_pem_private_key(
        private_key_pem.bytes(), password=None, backend=default_backend()
    )
    encrypted_key = private_key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.TraditionalOpenSSL,
        serialization.BestAvailableEncryption(password),
    )
    return trustme.Blob(encrypted_key)
157,783 | import socket
import ssl
import sys
import tornado.gen
import tornado.httpclient
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.web
class ProxyHandler(tornado.web.RequestHandler):
    """Minimal tornado-based HTTP proxy used by the test suite.

    GET/POST requests are re-issued upstream with AsyncHTTPClient and the
    response relayed back; CONNECT splices raw bytes bidirectionally
    between the client and the upstream host.
    """

    SUPPORTED_METHODS = ["GET", "POST", "CONNECT"]  # type: ignore[assignment]

    async def get(self) -> None:
        """Forward the incoming request upstream and relay the response."""

        async def handle_response(response: tornado.httpclient.HTTPResponse) -> None:
            # Non-HTTP failures (e.g. connection errors) become a 500 here;
            # HTTP-level errors are relayed with their original status below.
            if response.error and not isinstance(
                response.error, tornado.httpclient.HTTPError
            ):
                self.set_status(500)
                self.write("Internal server error:\n" + str(response.error))
                await self.finish()
            else:
                self.set_status(response.code)
                # Only a small allow-list of headers is copied through.
                for header in (
                    "Date",
                    "Cache-Control",
                    "Server",
                    "Content-Type",
                    "Location",
                ):
                    v = response.headers.get(header)
                    if v:
                        self.set_header(header, v)
                if response.body:
                    self.write(response.body)
                await self.finish()

        # Verify the upstream server against the CA bundle configured in the
        # application settings, when one was provided.
        upstream_ca_certs = self.application.settings.get("upstream_ca_certs", None)
        ssl_options = None
        if upstream_ca_certs:
            ssl_options = ssl.create_default_context(cafile=upstream_ca_certs)
        assert self.request.uri is not None
        assert self.request.method is not None
        req = tornado.httpclient.HTTPRequest(
            url=self.request.uri,
            method=self.request.method,
            body=self.request.body,
            headers=self.request.headers,
            follow_redirects=False,
            allow_nonstandard_methods=True,
            ssl_options=ssl_options,
        )
        client = tornado.httpclient.AsyncHTTPClient()
        try:
            response = await client.fetch(req)
            await handle_response(response)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, "response") and e.response:
                await handle_response(e.response)
            else:
                self.set_status(500)
                self.write("Internal server error:\n" + str(e))
                # NOTE(review): finish() is not awaited here, unlike the other
                # finish() calls above -- confirm this is intentional.
                self.finish()

    async def post(self) -> None:
        """POST is proxied exactly like GET (the body comes from self.request)."""
        await self.get()

    async def connect(self) -> None:
        """Handle CONNECT by tunnelling bytes between client and upstream."""
        assert self.request.uri is not None
        host, port = self.request.uri.split(":")
        assert self.request.connection is not None
        client: tornado.iostream.IOStream = self.request.connection.stream  # type: ignore[attr-defined]

        async def start_forward(
            reader: tornado.iostream.IOStream, writer: tornado.iostream.IOStream
        ) -> None:
            # Copy bytes in one direction until the reading side closes.
            while True:
                try:
                    data = await reader.read_bytes(4096, partial=True)
                except tornado.iostream.StreamClosedError:
                    break
                if not data:
                    break
                writer.write(data)
            writer.close()

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        upstream = tornado.iostream.IOStream(s)
        await upstream.connect((host, int(port)))
        client.write(b"HTTP/1.0 200 Connection established\r\n\r\n")
        # Pump both directions concurrently until both streams close.
        fu1 = start_forward(client, upstream)
        fu2 = start_forward(upstream, client)
        await tornado.gen.multi([fu1, fu2])
The provided code snippet includes necessary dependencies for implementing the `run_proxy` function. Write a Python function `def run_proxy(port: int, start_ioloop: bool = True) -> None` to solve the following problem:
Run proxy on the specified port. If start_ioloop is True (default), the tornado IOLoop will be started immediately.
Here is the function:
def run_proxy(port: int, start_ioloop: bool = True) -> None:
    """Serve the test ProxyHandler on *port*.

    When *start_ioloop* is true (the default), the tornado IOLoop is started
    immediately and this call blocks.
    """
    application = tornado.web.Application([(r".*", ProxyHandler)])
    application.listen(port)
    loop = tornado.ioloop.IOLoop.instance()
    if start_ioloop:
        loop.start()
157,784 | import collections
import contextlib
import gzip
import json
import logging
import sys
import zlib
from datetime import datetime, timedelta
from http.client import responses
from io import BytesIO
from typing import Any, Dict, Optional, Sequence, Tuple, Union
from urllib.parse import urlsplit
from tornado import httputil
from tornado.web import RequestHandler
from urllib3.util.util import to_str
def request_params(request: httputil.HTTPServerRequest) -> Dict[str, bytes]:
    """Flatten tornado's multi-valued query arguments to one value per key.

    Only the first value of each argument is kept.
    """
    return {name: next(iter(values)) for name, values in request.arguments.items()}
157,785 | import os
import shutil
import subprocess
import nox
def unsupported_python2(session: nox.Session) -> None:
    """Assert that installing the package fails with an 'Unsupported Python
    version' error (the session is presumably configured with a Python 2
    interpreter -- confirm against the session decorator in the noxfile).
    """
    # Can't check both returncode and output with session.run
    process = subprocess.run(
        ["python", "setup.py", "install"],
        env={**session.env},
        text=True,
        capture_output=True,
    )
    assert process.returncode == 1
    print(process.stderr)
    assert "Unsupported Python version" in process.stderr
157,786 | import os
import shutil
import subprocess
import nox
def tests_impl(
    session: nox.Session,
    extras: str = "socks,secure,brotli",
    byte_string_comparisons: bool = True,
) -> None:
    """Shared body for the per-interpreter test sessions.

    :param extras: pip extras to install alongside the package.
    :param byte_string_comparisons: when True, run Python with -bb so that
        implicit bytes/str comparisons raise errors instead of passing silently.
    """
    # Install deps and the package itself.
    session.install("-r", "dev-requirements.txt")
    session.install(f".[{extras}]")
    # Show the pip version.
    session.run("pip", "--version")
    # Print the Python version and bytesize.
    session.run("python", "--version")
    session.run("python", "-c", "import struct; print(struct.calcsize('P') * 8)")
    # Print OpenSSL information.
    session.run("python", "-m", "OpenSSL.debug")
    # Inspired from https://github.com/pyca/cryptography
    # We use parallel mode and then combine here so that coverage.py will take
    # the paths like .tox/pyXY/lib/pythonX.Y/site-packages/urllib3/__init__.py
    # and collapse them into src/urllib3/__init__.py.
    session.run(
        "python",
        *(("-bb",) if byte_string_comparisons else ()),
        "-m",
        "coverage",
        "run",
        "--parallel-mode",
        "-m",
        "pytest",
        "-r",
        "a",
        "--tb=native",
        "--no-success-flaky-report",
        *(session.posargs or ("test/",)),
        env={"PYTHONWARNINGS": "always::DeprecationWarning"},
    )
    session.run("coverage", "combine")
    session.run("coverage", "report", "-m")
    session.run("coverage", "xml")
The provided code snippet includes necessary dependencies for implementing the `test_brotlipy` function. Write a Python function `def test_brotlipy(session: nox.Session) -> None` to solve the following problem:
Check that if 'brotlipy' is installed instead of 'brotli' or 'brotlicffi' that we still don't blow up.
Here is the function:
def test_brotlipy(session: nox.Session) -> None:
    """Check that if 'brotlipy' is installed instead of 'brotli' or
    'brotlicffi' that we still don't blow up.
    """
    session.install("brotlipy")
    # byte_string_comparisons=False: presumably -bb would otherwise turn
    # brotlipy's internal bytes/str comparisons into errors -- confirm.
    tests_impl(session, extras="socks,secure", byte_string_comparisons=False)
157,787 | import os
import shutil
import subprocess
import nox
def git_clone(session: nox.Session, git_url: str) -> None:
    """Shallow-clone *git_url* into the current directory using the system git."""
    session.run("git", "clone", "--depth", "1", git_url, external=True)
def downstream_botocore(session: nox.Session) -> None:
    """Run botocore's test suite against this in-development urllib3 checkout."""
    root = os.getcwd()
    tmp_dir = session.create_tmp()
    session.cd(tmp_dir)
    git_clone(session, "https://github.com/boto/botocore")
    session.chdir("botocore")
    # Record the exact upstream commit being tested in the session log.
    session.run("git", "rev-parse", "HEAD", external=True)
    session.run("python", "scripts/ci/install")
    session.cd(root)
    # Install this checkout of urllib3 over whatever botocore pulled in.
    session.install(".", silent=False)
    session.cd(f"{tmp_dir}/botocore")
    session.run("python", "-c", "import urllib3; print(urllib3.__version__)")
    session.run("python", "scripts/ci/run-tests")
157,788 | import os
import shutil
import subprocess
import nox
def git_clone(session: nox.Session, git_url: str) -> None:
    """Shallow-clone ``git_url`` into the current working directory.

    ``external=True`` lets nox invoke the system ``git`` binary.
    """
    # --depth 1: only HEAD is needed for downstream testing, keep it fast.
    session.run("git", "clone", "--depth", "1", git_url, external=True)
def downstream_requests(session: nox.Session) -> None:
    """Run requests' test suite against the local urllib3 checkout."""
    project_root = os.getcwd()
    scratch = session.create_tmp()

    session.cd(scratch)
    git_clone(session, "https://github.com/psf/requests")
    session.chdir("requests")
    # Apply local compatibility fixes, then record the commit under test.
    session.run("git", "apply", f"{project_root}/ci/requests.patch", external=True)
    session.run("git", "rev-parse", "HEAD", external=True)
    session.install(".[socks]", silent=False)
    session.install("-r", "requirements-dev.txt", silent=False)

    # Install the local urllib3 over the release requests pulled in.
    session.cd(project_root)
    session.install(".", silent=False)

    session.cd(f"{scratch}/requests")
    session.run("python", "-c", "import urllib3; print(urllib3.__version__)")
    session.run("pytest", "tests")
157,789 | import os
import shutil
import subprocess
import nox
def lint(session: nox.Session) -> None:
    """Run all configured pre-commit hooks, then the mypy session."""
    session.install("pre-commit")
    session.run("pre-commit", "run", "--all-files")
    # Also type-check; mypy is defined elsewhere in this noxfile.
    mypy(session)
The provided code snippet includes necessary dependencies for implementing the `format` function. Write a Python function `def format(session: nox.Session) -> None` to solve the following problem:
Run code formatters.
Here is the function:
def format(session: nox.Session) -> None:
    """Run code formatters."""
    session.install("pre-commit")
    session.run("pre-commit", "--version")

    # pre-commit exits 1 when hooks modified files, which is fine for a
    # formatting run — so call it via subprocess instead of session.run
    # (which would treat a non-zero exit as failure).
    outcome = subprocess.run(
        ["pre-commit", "run", "--all-files"],
        env=session.env,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    # Ensure that pre-commit itself ran successfully
    assert outcome.returncode in (0, 1)

    lint(session)
157,790 | import os
import shutil
import subprocess
import nox
def docs(session: nox.Session) -> None:
    """Build the HTML documentation with Sphinx (-W: warnings are errors)."""
    session.install("-r", "docs/requirements.txt")
    session.install(".[socks,secure,brotli]")

    session.chdir("docs")
    build_dir = "_build"
    # Always build from scratch so stale artifacts can't mask failures.
    if os.path.exists(build_dir):
        shutil.rmtree(build_dir)
    session.run("sphinx-build", "-b", "html", "-W", ".", "_build/html")
157,791 | import os
import platform
import sys
import eel
def say_hello_py(x):
    """Print a greeting identifying where the call came from."""
    greeting = 'Hello from %s' % x
    print(greeting)
157,792 | import eel
import random
def py_random():
    """Return a pseudo-random float in [0.0, 1.0)."""
    value = random.random()
    return value
157,793 | import eel
import random
def py_exception(error):
    """Raise ValueError when *error* is truthy; otherwise report success."""
    if not error:
        return "No Error"
    raise ValueError("Test")
157,794 | import eel
import random
def print_num(n):
    """Echo a number received from the JavaScript side."""
    label = 'Got this from Javascript:'
    print(label, n)
157,795 | import eel
import random
def print_num_failed(error, stack):
    """Show how a failed JavaScript call is reported on the Python side."""
    print("This is an example of what javascript errors would look like:")
    # print() inserts a space between its arguments, matching the original
    # "\tError:  <err>" double-space output.
    for prefix, detail in (("\tError: ", error), ("\tStack: ", stack)):
        print(prefix, detail)
157,796 | import os
import platform
import random
import sys
import eel
The provided code snippet includes necessary dependencies for implementing the `expand_user` function. Write a Python function `def expand_user(folder)` to solve the following problem:
Return the full path to display in the UI.
Here is the function:
def expand_user(folder):
    """Return the full path to display in the UI."""
    # Expand a leading '~' to the user's home and mark the path as a glob root.
    return f'{os.path.expanduser(folder)}/*'
157,797 | import os
import platform
import random
import sys
import eel
The provided code snippet includes necessary dependencies for implementing the `pick_file` function. Write a Python function `def pick_file(folder)` to solve the following problem:
Return a random file from the specified folder.
Here is the function:
def pick_file(folder):
    """Return a random file from the specified folder."""
    folder = os.path.expanduser(folder)
    if not os.path.isdir(folder):
        return '{} is not a valid folder'.format(folder)
    # Only plain files count; subdirectories are excluded.
    candidates = [
        name for name in os.listdir(folder)
        if not os.path.isdir(os.path.join(folder, name))
    ]
    if not candidates:
        return 'No Files found in {}'.format(folder)
    return random.choice(candidates)
157,798 | import os
import platform
import random
import sys
import eel
sys.path.insert(1, '../../')
def say_hello_py(x):
    """Print message from JavaScript on app initialization, then call a JS function."""
    greeting = 'Hello from %s' % x
    print(greeting)  # noqa T001
    # Round-trip: exposed JS function defined on the page side.
    eel.say_hello_js('Python {from within say_hello_py()}!')
The provided code snippet includes necessary dependencies for implementing the `start_eel` function. Write a Python function `def start_eel(develop)` to solve the following problem:
Start Eel with either production or development configuration.
Here is the function:
def start_eel(develop):
    """Start Eel with either production or development configuration."""
    if develop:
        # Development: pages are served by an external dev server on :3000.
        directory, app, page = 'src', None, {'port': 3000}
    else:
        # Production: serve the prebuilt bundle in a Chrome app window.
        directory, app, page = 'build', 'chrome-app', 'index.html'

    eel.init(directory, ['.tsx', '.ts', '.jsx', '.js', '.html'])

    # These will be queued until the first connection is made, but won't be repeated on a page reload
    say_hello_py('Python World!')
    eel.say_hello_js('Python World!')  # Call a JavaScript function (must be after `eel.init()`)
    eel.show_log('https://github.com/samuelhwilliams/Eel/issues/363 (show_log)')

    launch_options = dict(
        host='localhost',
        port=8080,
        size=(1280, 800),
    )
    try:
        eel.start(page, mode=app, **launch_options)
    except EnvironmentError:
        # If Chrome isn't found, fallback to Microsoft Edge on Win10 or greater
        if sys.platform in ('win32', 'win64') and int(platform.release()) >= 10:
            eel.start(page, mode='edge', **launch_options)
        else:
            raise
157,799 | import eel
def say_hello_py(x):
    """Print which side of the bridge the call originated from."""
    message = 'Hello from %s' % x
    print(message)
157,800 | import eel, os, random
def pick_file(folder):
    """Pick a random entry from *folder*, or report an invalid path."""
    if not os.path.isdir(folder):
        return 'Not valid folder'
    # NOTE(review): raises IndexError on an empty folder, as the original did.
    return random.choice(os.listdir(folder))
157,801 | import eel
def handleinput(x):
    """Print whatever value arrives from the page."""
    rendered = '%s' % x
    print(rendered)
157,802 | import eel, random
def py_random():
    """Produce a random float in the half-open interval [0.0, 1.0)."""
    result = random.random()
    return result
157,803 | import random
import eel
def py_random():
    """Return one uniformly distributed float from [0.0, 1.0)."""
    drawn = random.random()
    return drawn
157,804 | import random
import eel
def say_hello_py(x):
    """Greet, naming the caller passed in from JavaScript."""
    text = 'Hello from %s' % x
    print(text)
157,806 | import subprocess as sps
import webbrowser as wbr
import eel.chrome as chm
import eel.electron as ele
import eel.edge as edge
_browser_paths = {}
_browser_modules = {'chrome': chm,
'electron': ele,
'edge': edge}
def _build_urls(start_pages, options):
    """Build one full URL per start page.

    Dict pages and string pages are formatted by their respective helpers.
    """
    urls = []
    for page in start_pages:
        if isinstance(page, dict):
            urls.append(_build_url_from_dict(page, options))
        else:
            urls.append(_build_url_from_string(page, options))
    return urls
def open(start_pages, options):
    """Launch the configured browser (or none) on the given start pages."""
    # Build full URLs for starting pages (including host and port)
    urls = _build_urls(start_pages, options)

    mode = options.get('mode')
    if mode in [None, False]:
        # Don't open a browser
        return
    if mode == 'custom':
        # Just run whatever command the user provided
        sps.Popen(options['cmdline_args'],
                  stdout=sps.PIPE, stderr=sps.PIPE, stdin=sps.PIPE)
        return
    if mode in _browser_modules:
        # Run with a specific browser
        module = _browser_modules[mode]
        path = _browser_paths.get(mode)
        if path is None:
            # First use of this browser: locate its executable and cache it.
            path = module.find_path()
            _browser_paths[mode] = path
        if path is None:
            raise EnvironmentError("Can't find %s installation" % module.name)
        module.run(path, options, urls)
        return
    # Fall back to system default browser
    for url in urls:
        wbr.open(url)
157,807 | import subprocess as sps
import webbrowser as wbr
import eel.chrome as chm
import eel.electron as ele
import eel.edge as edge
_browser_paths = {}
def set_path(browser_name, path):
    """Override the executable path used for *browser_name*."""
    # Stored in the module-level cache that open() consults before
    # attempting auto-detection.
    _browser_paths[browser_name] = path
157,808 | import subprocess as sps
import webbrowser as wbr
import eel.chrome as chm
import eel.electron as ele
import eel.edge as edge
_browser_paths = {}
def get_path(browser_name):
    """Return the cached executable path for *browser_name*, or None."""
    return _browser_paths.get(browser_name)
157,809 | import http.client
import inspect
import warnings
from enum import Enum
from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast
from fastapi import routing
from fastapi.datastructures import DefaultPlaceholder
from fastapi.dependencies.models import Dependant
from fastapi.dependencies.utils import get_flat_dependant, get_flat_params
from fastapi.encoders import jsonable_encoder
from fastapi.openapi.constants import (
METHODS_WITH_BODY,
REF_PREFIX,
STATUS_CODES_WITH_NO_BODY,
)
from fastapi.openapi.models import OpenAPI
from fastapi.params import Body, Param
from fastapi.responses import Response
from fastapi.utils import (
deep_dict_update,
generate_operation_id_for_path,
get_model_definitions,
)
from pydantic import BaseModel
from pydantic.fields import ModelField, Undefined
from pydantic.schema import (
field_schema,
get_flat_models_from_fields,
get_model_name_map,
)
from pydantic.utils import lenient_issubclass
from starlette.responses import JSONResponse
from starlette.routing import BaseRoute
from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY
def generate_operation_id_for_path(
    *, name: str, path: str, method: str
) -> str:  # pragma: nocover
    """Build an OpenAPI operation id from a route name, path and HTTP method.

    Deprecated: kept only for backward compatibility; emits a
    DeprecationWarning on every call.
    """
    warnings.warn(
        "fastapi.utils.generate_operation_id_for_path() was deprecated, "
        "it is not used internally, and will be removed soon",
        DeprecationWarning,
        stacklevel=2,
    )
    # Replace every character that is not alphanumeric/underscore.
    sanitized = re.sub("[^0-9a-zA-Z_]", "_", name + path)
    return sanitized + "_" + method.lower()
def generate_operation_id(
    *, route: routing.APIRoute, method: str
) -> str:  # pragma: nocover
    """Return the route's explicit operation id, or derive one from its path.

    Deprecated: kept only for backward compatibility; emits a
    DeprecationWarning on every call.
    """
    warnings.warn(
        "fastapi.openapi.utils.generate_operation_id() was deprecated, "
        "it is not used internally, and will be removed soon",
        DeprecationWarning,
        stacklevel=2,
    )
    # An explicitly-set operation_id always wins.
    if route.operation_id:
        return route.operation_id
    return generate_operation_id_for_path(
        name=route.name, path=route.path_format, method=method
    )
157,810 | import functools
import re
import warnings
from dataclasses import is_dataclass
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type, Union, cast
import fastapi
from fastapi.datastructures import DefaultPlaceholder, DefaultType
from fastapi.openapi.constants import REF_PREFIX
from pydantic import BaseConfig, BaseModel, create_model
from pydantic.class_validators import Validator
from pydantic.fields import FieldInfo, ModelField, UndefinedType
from pydantic.schema import model_process_schema
from pydantic.utils import lenient_issubclass
def create_response_field(
    name: str,
    type_: Type[Any],
    class_validators: Optional[Dict[str, Validator]] = None,
    default: Optional[Any] = None,
    required: Union[bool, UndefinedType] = False,
    model_config: Type[BaseConfig] = BaseConfig,
    field_info: Optional[FieldInfo] = None,
    alias: Optional[str] = None,
) -> ModelField:
    """
    Create a new response field. Raises if type_ is invalid.
    """
    try:
        # ModelField raises RuntimeError for types pydantic cannot handle.
        return ModelField(
            name=name,
            type_=type_,
            class_validators=class_validators or {},
            default=default,
            required=required,
            model_config=model_config,
            alias=alias,
            field_info=field_info or FieldInfo(None),
        )
    except RuntimeError:
        raise fastapi.exceptions.FastAPIError(
            f"Invalid args for response field! Hint: check that {type_} is a valid pydantic field type"
        )
def create_cloned_field(
    field: ModelField,
    *,
    cloned_types: Optional[Dict[Type[BaseModel], Type[BaseModel]]] = None,
) -> ModelField:
    """Deep-clone a pydantic ModelField, cloning any BaseModel types it uses.

    The clone uses subclasses of the original model types so that response
    serialization can't leak fields declared only on richer input models.
    ``cloned_types`` memoizes model-type clones to support recursive models.
    """
    # _cloned_types has already cloned types, to support recursive models
    if cloned_types is None:
        cloned_types = dict()
    original_type = field.type_
    # Dataclasses carry their generated pydantic model on __pydantic_model__.
    if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"):
        original_type = original_type.__pydantic_model__
    use_type = original_type
    if lenient_issubclass(original_type, BaseModel):
        original_type = cast(Type[BaseModel], original_type)
        use_type = cloned_types.get(original_type)
        if use_type is None:
            # Register the clone BEFORE recursing so self-referencing models
            # terminate instead of looping forever.
            use_type = create_model(original_type.__name__, __base__=original_type)
            cloned_types[original_type] = use_type
            for f in original_type.__fields__.values():
                use_type.__fields__[f.name] = create_cloned_field(
                    f, cloned_types=cloned_types
                )
    new_field = create_response_field(name=field.name, type_=use_type)
    # Copy every per-field attribute so the clone validates identically.
    new_field.has_alias = field.has_alias
    new_field.alias = field.alias
    new_field.class_validators = field.class_validators
    new_field.default = field.default
    new_field.required = field.required
    new_field.model_config = field.model_config
    new_field.field_info = field.field_info
    new_field.allow_none = field.allow_none
    new_field.validate_always = field.validate_always
    if field.sub_fields:
        new_field.sub_fields = [
            create_cloned_field(sub_field, cloned_types=cloned_types)
            for sub_field in field.sub_fields
        ]
    if field.key_field:
        new_field.key_field = create_cloned_field(
            field.key_field, cloned_types=cloned_types
        )
    new_field.validators = field.validators
    new_field.pre_validators = field.pre_validators
    new_field.post_validators = field.post_validators
    new_field.parse_json = field.parse_json
    new_field.shape = field.shape
    # Rebuild the validator pipeline from the attributes assigned above.
    new_field.populate_validators()
    return new_field
157,811 | import functools
import re
import warnings
from dataclasses import is_dataclass
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type, Union, cast
import fastapi
from fastapi.datastructures import DefaultPlaceholder, DefaultType
from fastapi.openapi.constants import REF_PREFIX
from pydantic import BaseConfig, BaseModel, create_model
from pydantic.class_validators import Validator
from pydantic.fields import FieldInfo, ModelField, UndefinedType
from pydantic.schema import model_process_schema
from pydantic.utils import lenient_issubclass
def generate_unique_id(route: "APIRoute") -> str:
    """Build a deterministic operation id from a route's name, path and method."""
    raw_id = route.name + route.path_format
    # Anything outside [0-9a-zA-Z_] becomes an underscore.
    raw_id = re.sub("[^0-9a-zA-Z_]", "_", raw_id)
    assert route.methods
    return raw_id + "_" + list(route.methods)[0].lower()
157,812 | import functools
import re
import warnings
from dataclasses import is_dataclass
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type, Union, cast
import fastapi
from fastapi.datastructures import DefaultPlaceholder, DefaultType
from fastapi.openapi.constants import REF_PREFIX
from pydantic import BaseConfig, BaseModel, create_model
from pydantic.class_validators import Validator
from pydantic.fields import FieldInfo, ModelField, UndefinedType
from pydantic.schema import model_process_schema
from pydantic.utils import lenient_issubclass
class DefaultPlaceholder:
    """
    You shouldn't use this class directly.

    It marks a default value so callers can tell whether a default was
    overwritten, even when the overriding value is itself truthy.
    """

    def __init__(self, value: Any):
        # The wrapped default; read directly by callers that unwrap it.
        self.value = value

    def __bool__(self) -> bool:
        # A placeholder is truthy exactly when its wrapped value is.
        return bool(self.value)

    def __eq__(self, other: object) -> bool:
        # Equal only to another placeholder wrapping an equal value.
        if not isinstance(other, DefaultPlaceholder):
            return False
        return other.value == self.value
# Type variable letting get_value_or_default preserve the caller's item type.
DefaultType = TypeVar("DefaultType")
The provided code snippet includes necessary dependencies for implementing the `get_value_or_default` function. Write a Python function `def get_value_or_default( first_item: Union[DefaultPlaceholder, DefaultType], *extra_items: Union[DefaultPlaceholder, DefaultType], ) -> Union[DefaultPlaceholder, DefaultType]` to solve the following problem:
Pass items or `DefaultPlaceholder`s by descending priority. The first one to _not_ be a `DefaultPlaceholder` will be returned. Otherwise, the first item (a `DefaultPlaceholder`) will be returned.
Here is the function:
def get_value_or_default(
    first_item: Union[DefaultPlaceholder, DefaultType],
    *extra_items: Union[DefaultPlaceholder, DefaultType],
) -> Union[DefaultPlaceholder, DefaultType]:
    """
    Pass items or `DefaultPlaceholder`s by descending priority.

    The first one to _not_ be a `DefaultPlaceholder` will be returned.
    Otherwise, the first item (a `DefaultPlaceholder`) will be returned.
    """
    for candidate in (first_item, *extra_items):
        if not isinstance(candidate, DefaultPlaceholder):
            return candidate
    # Everything was a placeholder: keep the highest-priority one.
    return first_item
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.